1use std::sync::Arc;
2
3use ::open_ai::Model as OpenAiModel;
4use anthropic::Model as AnthropicModel;
5use feature_flags::FeatureFlagAppExt;
6use gpui::{AppContext, Pixels};
7use language_model::{CloudModel, LanguageModel};
8use lmstudio::Model as LmStudioModel;
9use ollama::Model as OllamaModel;
10use schemars::{schema::Schema, JsonSchema};
11use serde::{Deserialize, Serialize};
12use settings::{Settings, SettingsSources};
13
/// Which edge of the workspace the assistant panel is docked to.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    /// Dock to the right edge by default.
    #[default]
    Right,
    Bottom,
}
22
/// Per-provider configuration as stored by version 1 of the settings
/// schema. Serialized with the provider id in a `"name"` tag field.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    /// Zed's hosted models.
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    /// OpenAI; the only V1 provider that also stored a custom model list.
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
}
50
/// Resolved assistant settings, produced by merging all settings sources
/// (see the `Settings::load` impl below).
#[derive(Debug, Default)]
pub struct AssistantSettings {
    /// Whether the assistant is enabled.
    pub enabled: bool,
    /// Whether to show the assistant panel button in the status bar.
    pub button: bool,
    /// Which edge the assistant panel docks to.
    pub dock: AssistantDockPosition,
    /// Panel width used when docked to the left or right.
    pub default_width: Pixels,
    /// Panel height used when docked to the bottom.
    pub default_height: Pixels,
    /// The default model used for new chats.
    pub default_model: LanguageModelSelection,
    /// Additional models used to generate alternatives for inline assists.
    pub inline_alternatives: Vec<LanguageModelSelection>,
    /// True when any settings source used a pre-V2 format.
    pub using_outdated_settings_version: bool,
    /// Opt-in flag for experimental live diffs in the assistant panel.
    pub enable_experimental_live_diffs: bool,
}
63
impl AssistantSettings {
    /// Live diffs are always available to Zed staff; everyone else must opt
    /// in via `enable_experimental_live_diffs`.
    pub fn are_live_diffs_enabled(&self, cx: &AppContext) -> bool {
        cx.is_staff() || self.enable_experimental_live_diffs
    }
}
69
/// Assistant panel settings as written in the settings file, in any of the
/// formats ever supported.
///
/// `untagged`: serde tries `Versioned` (which carries a `"version"` tag)
/// first, then falls back to the legacy, unversioned shape.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}
77
// Hand-written `JsonSchema` impl that delegates entirely to the versioned
// form, so only the current settings shape appears in generated schemas.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
91
impl Default for AssistantSettingsContent {
    /// New settings default to the latest versioned format.
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}
97
98impl AssistantSettingsContent {
99 pub fn is_version_outdated(&self) -> bool {
100 match self {
101 AssistantSettingsContent::Versioned(settings) => match settings {
102 VersionedAssistantSettingsContent::V1(_) => true,
103 VersionedAssistantSettingsContent::V2(_) => false,
104 },
105 AssistantSettingsContent::Legacy(_) => true,
106 }
107 }
108
109 fn upgrade(&self) -> AssistantSettingsContentV2 {
110 match self {
111 AssistantSettingsContent::Versioned(settings) => match settings {
112 VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
113 enabled: settings.enabled,
114 button: settings.button,
115 dock: settings.dock,
116 default_width: settings.default_width,
117 default_height: settings.default_width,
118 default_model: settings
119 .provider
120 .clone()
121 .and_then(|provider| match provider {
122 AssistantProviderContentV1::ZedDotDev { default_model } => {
123 default_model.map(|model| LanguageModelSelection {
124 provider: "zed.dev".to_string(),
125 model: model.id().to_string(),
126 })
127 }
128 AssistantProviderContentV1::OpenAi { default_model, .. } => {
129 default_model.map(|model| LanguageModelSelection {
130 provider: "openai".to_string(),
131 model: model.id().to_string(),
132 })
133 }
134 AssistantProviderContentV1::Anthropic { default_model, .. } => {
135 default_model.map(|model| LanguageModelSelection {
136 provider: "anthropic".to_string(),
137 model: model.id().to_string(),
138 })
139 }
140 AssistantProviderContentV1::Ollama { default_model, .. } => {
141 default_model.map(|model| LanguageModelSelection {
142 provider: "ollama".to_string(),
143 model: model.id().to_string(),
144 })
145 }
146 AssistantProviderContentV1::LmStudio { default_model, .. } => {
147 default_model.map(|model| LanguageModelSelection {
148 provider: "lmstudio".to_string(),
149 model: model.id().to_string(),
150 })
151 }
152 }),
153 inline_alternatives: None,
154 enable_experimental_live_diffs: None,
155 },
156 VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
157 },
158 AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
159 enabled: None,
160 button: settings.button,
161 dock: settings.dock,
162 default_width: settings.default_width,
163 default_height: settings.default_height,
164 default_model: Some(LanguageModelSelection {
165 provider: "openai".to_string(),
166 model: settings
167 .default_open_ai_model
168 .clone()
169 .unwrap_or_default()
170 .id()
171 .to_string(),
172 }),
173 inline_alternatives: None,
174 enable_experimental_live_diffs: None,
175 },
176 }
177 }
178
179 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
180 match self {
181 AssistantSettingsContent::Versioned(settings) => match settings {
182 VersionedAssistantSettingsContent::V1(settings) => {
183 settings.dock = Some(dock);
184 }
185 VersionedAssistantSettingsContent::V2(settings) => {
186 settings.dock = Some(dock);
187 }
188 },
189 AssistantSettingsContent::Legacy(settings) => {
190 settings.dock = Some(dock);
191 }
192 }
193 }
194
195 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
196 let model = language_model.id().0.to_string();
197 let provider = language_model.provider_id().0.to_string();
198
199 match self {
200 AssistantSettingsContent::Versioned(settings) => match settings {
201 VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
202 "zed.dev" => {
203 log::warn!("attempted to set zed.dev model on outdated settings");
204 }
205 "anthropic" => {
206 let api_url = match &settings.provider {
207 Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
208 api_url.clone()
209 }
210 _ => None,
211 };
212 settings.provider = Some(AssistantProviderContentV1::Anthropic {
213 default_model: AnthropicModel::from_id(&model).ok(),
214 api_url,
215 });
216 }
217 "ollama" => {
218 let api_url = match &settings.provider {
219 Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
220 api_url.clone()
221 }
222 _ => None,
223 };
224 settings.provider = Some(AssistantProviderContentV1::Ollama {
225 default_model: Some(ollama::Model::new(&model, None, None)),
226 api_url,
227 });
228 }
229 "lmstudio" => {
230 let api_url = match &settings.provider {
231 Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
232 api_url.clone()
233 }
234 _ => None,
235 };
236 settings.provider = Some(AssistantProviderContentV1::LmStudio {
237 default_model: Some(lmstudio::Model::new(&model, None, None)),
238 api_url,
239 });
240 }
241 "openai" => {
242 let (api_url, available_models) = match &settings.provider {
243 Some(AssistantProviderContentV1::OpenAi {
244 api_url,
245 available_models,
246 ..
247 }) => (api_url.clone(), available_models.clone()),
248 _ => (None, None),
249 };
250 settings.provider = Some(AssistantProviderContentV1::OpenAi {
251 default_model: OpenAiModel::from_id(&model).ok(),
252 api_url,
253 available_models,
254 });
255 }
256 _ => {}
257 },
258 VersionedAssistantSettingsContent::V2(settings) => {
259 settings.default_model = Some(LanguageModelSelection { provider, model });
260 }
261 },
262 AssistantSettingsContent::Legacy(settings) => {
263 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
264 settings.default_open_ai_model = Some(model);
265 }
266 }
267 }
268 }
269}
270
/// All versioned settings formats, discriminated by the `"version"` key in
/// the settings JSON.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
279
impl Default for VersionedAssistantSettingsContent {
    /// Defaults to the latest version (V2) with every field unset, so the
    /// effective values come entirely from the merged settings sources.
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
        })
    }
}
294
/// Version 2 (current) of the assistant settings file format.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
}
326
/// A (provider, model) pair identifying a language model.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    /// Provider id, e.g. "openai" or "zed.dev". The JSON schema restricts
    /// this to the known providers via `providers_schema`.
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    /// Provider-specific model id, e.g. "gpt-4".
    pub model: String,
}
333
334fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
335 schemars::schema::SchemaObject {
336 enum_values: Some(vec![
337 "anthropic".into(),
338 "google".into(),
339 "lmstudio".into(),
340 "ollama".into(),
341 "openai".into(),
342 "zed.dev".into(),
343 "copilot_chat".into(),
344 ]),
345 ..Default::default()
346 }
347 .into()
348}
349
350impl Default for LanguageModelSelection {
351 fn default() -> Self {
352 Self {
353 provider: "openai".to_string(),
354 model: "gpt-4".to_string(),
355 }
356 }
357}
358
/// Version 1 of the assistant settings file format, which stored
/// per-provider configuration inline (see [`AssistantProviderContentV1`]).
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
387
/// The original, unversioned settings format, which only supported OpenAI.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}
415
impl Settings for AssistantSettings {
    /// Settings live under the `"assistant"` key of the settings file.
    const KEY: Option<&'static str> = Some("assistant");

    /// Keep the `"version"` key when rewriting the settings file, even
    /// though no resolved field maps to it directly.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Merges all settings sources into one resolved `AssistantSettings`.
    ///
    /// Each source is upgraded to the V2 shape first; later sources
    /// (user customizations) override earlier ones (defaults) field by
    /// field, with `None` fields leaving the previous value in place.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Remember if *any* source still uses an old format so the UI
            // can prompt the user to migrate.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
        }

        Ok(settings)
    }
}
457
/// Overwrites `target` with the inner value when `value` is `Some`;
/// leaves `target` untouched when it is `None`.
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(next) = value {
        *target = next;
    }
}
463
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    /// End-to-end check that assistant settings round-trip through the
    /// settings store: defaults load as non-outdated, an update is written
    /// back with `"version": "2"`, and the written file deserializes
    /// cleanly.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // Built-in defaults should already be in the current format and
        // select the zed.dev Claude model.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        // Write a V2 payload back through the settings store.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        // The serialized file must carry the version tag (preserved via
        // `PRESERVED_KEYS`) and parse back as non-outdated settings.
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        // Minimal wrapper matching the top-level settings-file shape.
        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}