1use std::sync::Arc;
2
3use ::open_ai::Model as OpenAiModel;
4use anthropic::Model as AnthropicModel;
5use feature_flags::FeatureFlagAppExt;
6use fs::Fs;
7use gpui::{AppContext, Pixels};
8use language_model::provider::open_ai;
9use language_model::settings::{
10 AnthropicSettingsContent, AnthropicSettingsContentV1, OllamaSettingsContent,
11 OpenAiSettingsContent, OpenAiSettingsContentV1, VersionedAnthropicSettingsContent,
12 VersionedOpenAiSettingsContent,
13};
14use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
15use ollama::Model as OllamaModel;
16use schemars::{schema::Schema, JsonSchema};
17use serde::{Deserialize, Serialize};
18use settings::{update_settings_file, Settings, SettingsSources};
19
/// Which edge of the workspace the assistant panel is docked to.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    /// Default dock position.
    #[default]
    Right,
    Bottom,
}
28
/// Provider configuration as stored by version 1 of the assistant settings.
///
/// Serialized with a `"name"` tag, e.g. `{ "name": "openai", ... }`.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    /// Zed's hosted models.
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    /// OpenAI API configuration, optionally with custom models.
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    /// Anthropic API configuration.
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    /// Locally-hosted Ollama configuration.
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
}
51
/// Fully-resolved assistant settings, produced by merging defaults with user
/// customizations in [`Settings::load`].
#[derive(Debug, Default)]
pub struct AssistantSettings {
    /// Whether the assistant is enabled at all.
    pub enabled: bool,
    /// Whether the assistant button is shown in the status bar.
    pub button: bool,
    /// Where the assistant panel is docked.
    pub dock: AssistantDockPosition,
    /// Panel width when docked left or right.
    pub default_width: Pixels,
    /// Panel height when docked at the bottom.
    pub default_height: Pixels,
    /// Model used for new chats.
    pub default_model: LanguageModelSelection,
    /// Extra models used to generate inline-assist alternatives.
    pub inline_alternatives: Vec<LanguageModelSelection>,
    /// Set when any settings source used a pre-V2 schema (see
    /// `is_version_outdated`), so the UI can prompt for migration.
    pub using_outdated_settings_version: bool,
    /// Opt-in flag for experimental live diffs in the panel.
    pub enable_experimental_live_diffs: bool,
    /// Whether keybinding hints are shown for the assistant.
    pub show_hints: bool,
}
65
impl AssistantSettings {
    /// Live diffs are available when explicitly enabled via the experimental
    /// setting, or unconditionally for staff users.
    pub fn are_live_diffs_enabled(&self, cx: &AppContext) -> bool {
        cx.is_staff() || self.enable_experimental_live_diffs
    }
}
71
/// Assistant panel settings
///
/// Deserialized untagged: input with a `"version"` field parses as
/// [`Versioned`], anything else falls back to the pre-versioning
/// [`Legacy`] shape.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}
79
// Hand-written (rather than derived) so the published JSON schema only
// advertises the versioned shape; the legacy fallback is accepted at parse
// time but not suggested to users.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
93
94impl Default for AssistantSettingsContent {
95 fn default() -> Self {
96 Self::Versioned(VersionedAssistantSettingsContent::default())
97 }
98}
99
100impl AssistantSettingsContent {
101 pub fn is_version_outdated(&self) -> bool {
102 match self {
103 AssistantSettingsContent::Versioned(settings) => match settings {
104 VersionedAssistantSettingsContent::V1(_) => true,
105 VersionedAssistantSettingsContent::V2(_) => false,
106 },
107 AssistantSettingsContent::Legacy(_) => true,
108 }
109 }
110
111 pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
112 if let AssistantSettingsContent::Versioned(settings) = self {
113 if let VersionedAssistantSettingsContent::V1(settings) = settings {
114 if let Some(provider) = settings.provider.clone() {
115 match provider {
116 AssistantProviderContentV1::Anthropic { api_url, .. } => {
117 update_settings_file::<AllLanguageModelSettings>(
118 fs,
119 cx,
120 move |content, _| {
121 if content.anthropic.is_none() {
122 content.anthropic =
123 Some(AnthropicSettingsContent::Versioned(
124 VersionedAnthropicSettingsContent::V1(
125 AnthropicSettingsContentV1 {
126 api_url,
127 available_models: None,
128 },
129 ),
130 ));
131 }
132 },
133 )
134 }
135 AssistantProviderContentV1::Ollama { api_url, .. } => {
136 update_settings_file::<AllLanguageModelSettings>(
137 fs,
138 cx,
139 move |content, _| {
140 if content.ollama.is_none() {
141 content.ollama = Some(OllamaSettingsContent {
142 api_url,
143 available_models: None,
144 });
145 }
146 },
147 )
148 }
149 AssistantProviderContentV1::OpenAi {
150 api_url,
151 available_models,
152 ..
153 } => update_settings_file::<AllLanguageModelSettings>(
154 fs,
155 cx,
156 move |content, _| {
157 if content.openai.is_none() {
158 let available_models = available_models.map(|models| {
159 models
160 .into_iter()
161 .filter_map(|model| match model {
162 OpenAiModel::Custom {
163 name,
164 display_name,
165 max_tokens,
166 max_output_tokens,
167 max_completion_tokens: None,
168 } => Some(open_ai::AvailableModel {
169 name,
170 display_name,
171 max_tokens,
172 max_output_tokens,
173 max_completion_tokens: None,
174 }),
175 _ => None,
176 })
177 .collect::<Vec<_>>()
178 });
179 content.openai = Some(OpenAiSettingsContent::Versioned(
180 VersionedOpenAiSettingsContent::V1(
181 OpenAiSettingsContentV1 {
182 api_url,
183 available_models,
184 },
185 ),
186 ));
187 }
188 },
189 ),
190 _ => {}
191 }
192 }
193 }
194 }
195
196 *self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
197 self.upgrade(),
198 ));
199 }
200
201 fn upgrade(&self) -> AssistantSettingsContentV2 {
202 match self {
203 AssistantSettingsContent::Versioned(settings) => match settings {
204 VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
205 enabled: settings.enabled,
206 show_hints: None,
207 button: settings.button,
208 dock: settings.dock,
209 default_width: settings.default_width,
210 default_height: settings.default_width,
211 default_model: settings
212 .provider
213 .clone()
214 .and_then(|provider| match provider {
215 AssistantProviderContentV1::ZedDotDev { default_model } => {
216 default_model.map(|model| LanguageModelSelection {
217 provider: "zed.dev".to_string(),
218 model: model.id().to_string(),
219 })
220 }
221 AssistantProviderContentV1::OpenAi { default_model, .. } => {
222 default_model.map(|model| LanguageModelSelection {
223 provider: "openai".to_string(),
224 model: model.id().to_string(),
225 })
226 }
227 AssistantProviderContentV1::Anthropic { default_model, .. } => {
228 default_model.map(|model| LanguageModelSelection {
229 provider: "anthropic".to_string(),
230 model: model.id().to_string(),
231 })
232 }
233 AssistantProviderContentV1::Ollama { default_model, .. } => {
234 default_model.map(|model| LanguageModelSelection {
235 provider: "ollama".to_string(),
236 model: model.id().to_string(),
237 })
238 }
239 }),
240 inline_alternatives: None,
241 enable_experimental_live_diffs: None,
242 },
243 VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
244 },
245 AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
246 enabled: None,
247 show_hints: None,
248 button: settings.button,
249 dock: settings.dock,
250 default_width: settings.default_width,
251 default_height: settings.default_height,
252 default_model: Some(LanguageModelSelection {
253 provider: "openai".to_string(),
254 model: settings
255 .default_open_ai_model
256 .clone()
257 .unwrap_or_default()
258 .id()
259 .to_string(),
260 }),
261 inline_alternatives: None,
262 enable_experimental_live_diffs: None,
263 },
264 }
265 }
266
267 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
268 match self {
269 AssistantSettingsContent::Versioned(settings) => match settings {
270 VersionedAssistantSettingsContent::V1(settings) => {
271 settings.dock = Some(dock);
272 }
273 VersionedAssistantSettingsContent::V2(settings) => {
274 settings.dock = Some(dock);
275 }
276 },
277 AssistantSettingsContent::Legacy(settings) => {
278 settings.dock = Some(dock);
279 }
280 }
281 }
282
283 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
284 let model = language_model.id().0.to_string();
285 let provider = language_model.provider_id().0.to_string();
286
287 match self {
288 AssistantSettingsContent::Versioned(settings) => match settings {
289 VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
290 "zed.dev" => {
291 log::warn!("attempted to set zed.dev model on outdated settings");
292 }
293 "anthropic" => {
294 let api_url = match &settings.provider {
295 Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
296 api_url.clone()
297 }
298 _ => None,
299 };
300 settings.provider = Some(AssistantProviderContentV1::Anthropic {
301 default_model: AnthropicModel::from_id(&model).ok(),
302 api_url,
303 });
304 }
305 "ollama" => {
306 let api_url = match &settings.provider {
307 Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
308 api_url.clone()
309 }
310 _ => None,
311 };
312 settings.provider = Some(AssistantProviderContentV1::Ollama {
313 default_model: Some(ollama::Model::new(&model, None, None)),
314 api_url,
315 });
316 }
317 "openai" => {
318 let (api_url, available_models) = match &settings.provider {
319 Some(AssistantProviderContentV1::OpenAi {
320 api_url,
321 available_models,
322 ..
323 }) => (api_url.clone(), available_models.clone()),
324 _ => (None, None),
325 };
326 settings.provider = Some(AssistantProviderContentV1::OpenAi {
327 default_model: OpenAiModel::from_id(&model).ok(),
328 api_url,
329 available_models,
330 });
331 }
332 _ => {}
333 },
334 VersionedAssistantSettingsContent::V2(settings) => {
335 settings.default_model = Some(LanguageModelSelection { provider, model });
336 }
337 },
338 AssistantSettingsContent::Legacy(settings) => {
339 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
340 settings.default_open_ai_model = Some(model);
341 }
342 }
343 }
344 }
345}
346
/// Versioned assistant settings, discriminated by a string `"version"` field
/// in the serialized form (`"1"` or `"2"`).
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
355
impl Default for VersionedAssistantSettingsContent {
    /// Defaults to the latest (V2) schema with every field unset, so the
    /// documented per-field defaults apply at load time.
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            show_hints: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
        })
    }
}
371
/// Assistant settings schema, version 2 — the current on-disk shape.
/// All fields are optional; unset fields fall back to the defaults noted
/// below when merged in [`Settings::load`].
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show inline hints that show keybindings for inline assistant
    /// and assistant panel.
    ///
    /// Default: true
    show_hints: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
}
408
/// A (provider, model) pair identifying a language model, e.g.
/// `("openai", "gpt-4")`.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    // The provider id is an open string at runtime; the JSON schema narrows
    // it to the known providers for editor autocomplete.
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}
415
416fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
417 schemars::schema::SchemaObject {
418 enum_values: Some(vec![
419 "anthropic".into(),
420 "google".into(),
421 "ollama".into(),
422 "openai".into(),
423 "zed.dev".into(),
424 "copilot_chat".into(),
425 ]),
426 ..Default::default()
427 }
428 .into()
429}
430
431impl Default for LanguageModelSelection {
432 fn default() -> Self {
433 Self {
434 provider: "openai".to_string(),
435 model: "gpt-4".to_string(),
436 }
437 }
438}
439
/// Assistant settings schema, version 1 — superseded by
/// `AssistantSettingsContentV2` but still accepted and upgraded on load.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
468
/// Pre-versioning assistant settings shape (no `"version"` field). Only
/// OpenAI was supported; upgraded on load.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}
496
497impl Settings for AssistantSettings {
498 const KEY: Option<&'static str> = Some("assistant");
499
500 const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
501
502 type FileContent = AssistantSettingsContent;
503
504 fn load(
505 sources: SettingsSources<Self::FileContent>,
506 _: &mut gpui::AppContext,
507 ) -> anyhow::Result<Self> {
508 let mut settings = AssistantSettings::default();
509
510 for value in sources.defaults_and_customizations() {
511 if value.is_version_outdated() {
512 settings.using_outdated_settings_version = true;
513 }
514
515 let value = value.upgrade();
516 merge(&mut settings.enabled, value.enabled);
517 merge(&mut settings.show_hints, value.show_hints);
518 merge(&mut settings.button, value.button);
519 merge(&mut settings.dock, value.dock);
520 merge(
521 &mut settings.default_width,
522 value.default_width.map(Into::into),
523 );
524 merge(
525 &mut settings.default_height,
526 value.default_height.map(Into::into),
527 );
528 merge(&mut settings.default_model, value.default_model);
529 merge(&mut settings.inline_alternatives, value.inline_alternatives);
530 merge(
531 &mut settings.enable_experimental_live_diffs,
532 value.enable_experimental_live_diffs,
533 );
534 }
535
536 Ok(settings)
537 }
538}
539
/// Overwrites `target` with `value` when one was provided, leaving it
/// untouched otherwise.
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(replacement) = value {
        *target = replacement;
    }
}
545
#[cfg(test)]
mod tests {
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    // End-to-end check of versioned settings: defaults resolve to the
    // zed.dev model, a V2 write round-trips through the settings file with
    // its "version" key preserved, and the written file is not outdated.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // With no user settings present, the bundled defaults apply.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        // Write a V2 settings payload through the settings store.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            show_hints: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        // The serialized file must carry the version tag (see PRESERVED_KEYS).
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        // Minimal wrapper mirroring the top-level settings-file shape.
        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}