use std::sync::Arc;

use anthropic::Model as AnthropicModel;
use fs::Fs;
use gpui::{AppContext, Pixels};
use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
use ollama::Model as OllamaModel;
use open_ai::Model as OpenAiModel;
use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
}
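
// Illustrative only (not part of the original file): with the serde attributes
// above, a V1 provider entry in `settings.json` is tagged by `"name"`, e.g.
//
//     "provider": {
//         "name": "openai",
//         "default_model": "gpt-4",
//         "api_url": "https://api.openai.com/v1"
//     }
//
// The exact shape of `default_model` follows `open_ai::Model`'s own
// serialization, so treat the string above as a sketch rather than a spec.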

#[derive(Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub using_outdated_settings_version: bool,
}
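
// Illustrative usage (a sketch, not part of the original file): once registered
// with the settings store, the resolved values are read through the global, as
// the tests at the bottom of this file do:
//
//     let settings = AssistantSettings::get_global(cx);
//     if settings.enabled {
//         // e.g. show the panel button when `settings.button` is set
//     }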

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}

impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl Default for AssistantSettingsContent {
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}

impl AssistantSettingsContent {
    pub fn is_version_outdated(&self) -> bool {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            AssistantSettingsContent::Legacy(_) => true,
        }
    }

    /// Migrates any V1 provider configuration into `AllLanguageModelSettings`
    /// (only when that provider has no entry there yet), then rewrites `self`
    /// as a V2 settings document.
    pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
        if let AssistantSettingsContent::Versioned(settings) = self {
            if let VersionedAssistantSettingsContent::V1(settings) = settings {
                if let Some(provider) = settings.provider.clone() {
                    match provider {
                        AssistantProviderContentV1::Anthropic {
                            api_url,
                            low_speed_timeout_in_seconds,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.anthropic.is_none() {
                                    content.anthropic = Some(
                                        language_model::settings::AnthropicSettingsContent::Versioned(
                                            language_model::settings::VersionedAnthropicSettingsContent::V1(
                                                language_model::settings::AnthropicSettingsContentV1 {
                                                    api_url,
                                                    low_speed_timeout_in_seconds,
                                                    available_models: None,
                                                },
                                            ),
                                        ),
                                    );
                                }
                            },
                        ),
                        AssistantProviderContentV1::Ollama {
                            api_url,
                            low_speed_timeout_in_seconds,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.ollama.is_none() {
                                    content.ollama =
                                        Some(language_model::settings::OllamaSettingsContent {
                                            api_url,
                                            low_speed_timeout_in_seconds,
                                            available_models: None,
                                        });
                                }
                            },
                        ),
                        AssistantProviderContentV1::OpenAi {
                            api_url,
                            low_speed_timeout_in_seconds,
                            available_models,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.openai.is_none() {
                                    // Only custom models are carried over to the new
                                    // settings; built-in model variants are dropped.
                                    let available_models = available_models.map(|models| {
                                        models
                                            .into_iter()
                                            .filter_map(|model| match model {
                                                open_ai::Model::Custom {
                                                    name,
                                                    max_tokens,
                                                    max_output_tokens,
                                                } => Some(
                                                    language_model::provider::open_ai::AvailableModel {
                                                        name,
                                                        max_tokens,
                                                        max_output_tokens,
                                                    },
                                                ),
                                                _ => None,
                                            })
                                            .collect::<Vec<_>>()
                                    });
                                    content.openai = Some(
                                        language_model::settings::OpenAiSettingsContent::Versioned(
                                            language_model::settings::VersionedOpenAiSettingsContent::V1(
                                                language_model::settings::OpenAiSettingsContentV1 {
                                                    api_url,
                                                    low_speed_timeout_in_seconds,
                                                    available_models,
                                                },
                                            ),
                                        ),
                                    );
                                }
                            },
                        ),
                        _ => {}
                    }
                }
            }
        }

        *self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
            self.upgrade(),
        ));
    }

    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                },
                VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
            },
            AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".to_string(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
            },
        }
    }

    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.dock = Some(dock);
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                settings.dock = Some(dock);
            }
        }
    }

    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                // For V1 settings, keep any existing provider configuration
                // (API URL, timeout, available models) and only swap in the
                // new default model.
                VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
                    "zed.dev" => {
                        log::warn!("attempted to set zed.dev model on outdated settings");
                    }
                    "anthropic" => {
                        let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
                            Some(AssistantProviderContentV1::Anthropic {
                                api_url,
                                low_speed_timeout_in_seconds,
                                ..
                            }) => (api_url.clone(), *low_speed_timeout_in_seconds),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::Anthropic {
                            default_model: AnthropicModel::from_id(&model).ok(),
                            api_url,
                            low_speed_timeout_in_seconds,
                        });
                    }
                    "ollama" => {
                        let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
                            Some(AssistantProviderContentV1::Ollama {
                                api_url,
                                low_speed_timeout_in_seconds,
                                ..
                            }) => (api_url.clone(), *low_speed_timeout_in_seconds),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::Ollama {
                            default_model: Some(ollama::Model::new(&model, None, None)),
                            api_url,
                            low_speed_timeout_in_seconds,
                        });
                    }
                    "openai" => {
                        let (api_url, low_speed_timeout_in_seconds, available_models) =
                            match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    low_speed_timeout_in_seconds,
                                    available_models,
                                    ..
                                }) => (
                                    api_url.clone(),
                                    *low_speed_timeout_in_seconds,
                                    available_models.clone(),
                                ),
                                _ => (None, None, None),
                            };
                        settings.provider = Some(AssistantProviderContentV1::OpenAi {
                            default_model: open_ai::Model::from_id(&model).ok(),
                            api_url,
                            low_speed_timeout_in_seconds,
                            available_models,
                        });
                    }
                    _ => {}
                },
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.default_model = Some(LanguageModelSelection { provider, model });
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                if let Ok(model) = open_ai::Model::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
        }
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new contexts.
    default_model: Option<LanguageModelSelection>,
}
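
// Illustrative only (not part of the original file): because
// `VersionedAssistantSettingsContent` is internally tagged with `version`, a V2
// block in `settings.json` looks roughly like this (values mirror the documented
// defaults and the test at the bottom of the file):
//
//     "assistant": {
//         "version": "2",
//         "enabled": true,
//         "button": true,
//         "dock": "right",
//         "default_width": 640,
//         "default_height": 320,
//         "default_model": {
//             "provider": "zed.dev",
//             "model": "claude-3-5-sonnet"
//         }
//     }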

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}
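
// Illustrative only (not part of the original file): serialized, this is the
// `default_model` object shown above, e.g. `{ "provider": "openai", "model": "gpt-4" }`
// (the values of the `Default` impl below); the `provider` field is constrained
// in the JSON schema to the values enumerated in `providers_schema`.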

fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
    schemars::schema::SchemaObject {
        enum_values: Some(vec![
            "anthropic".into(),
            "google".into(),
            "ollama".into(),
            "openai".into(),
            "zed.dev".into(),
            "copilot_chat".into(),
        ]),
        ..Default::default()
    }
    .into()
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", or "zed.dev", each with its
    /// own default model and configuration.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new contexts.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new contexts.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}
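
// Illustrative only (not part of the original file): a legacy (pre-versioned)
// `settings.json` block has no `version` field and uses the OpenAI-specific
// keys directly, e.g.
//
//     "assistant": {
//         "dock": "right",
//         "default_open_ai_model": "gpt-4-1106-preview",
//         "openai_api_url": "https://api.openai.com/v1"
//     }
//
// The exact `default_open_ai_model` value follows `open_ai::Model`'s own
// serialization, so this is a sketch rather than a spec.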

impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(
                &mut settings.default_model,
                value.default_model.map(Into::into),
            );
        }

        Ok(settings)
    }
}

fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}
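
// For example, `merge(&mut settings.dock, Some(AssistantDockPosition::Bottom))`
// overwrites the current dock position, while `merge(&mut settings.dock, None)`
// leaves it untouched; `load` above relies on this to layer user customizations
// over the defaults.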

#[cfg(test)]
mod tests {
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}