use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use feature_flags::FeatureFlagAppExt;
use fs::Fs;
use gpui::{AppContext, Pixels};
use language_model::{CloudModel, LanguageModel};
use language_models::{
    provider::open_ai, AllLanguageModelSettings, AnthropicSettingsContent,
    AnthropicSettingsContentV1, OllamaSettingsContent, OpenAiSettingsContent,
    OpenAiSettingsContentV1, VersionedAnthropicSettingsContent, VersionedOpenAiSettingsContent,
};
use ollama::Model as OllamaModel;
use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

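/// Provider configuration as it was stored in version 1 of the assistant
/// settings. The serialized form is tagged by `"name"`; as an illustrative
/// sketch (not a default), an OpenAI entry might look like
/// `{ "name": "openai", "api_url": "https://api.openai.com/v1" }`.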
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
}

#[derive(Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
    pub show_hints: bool,
}

impl AssistantSettings {
    pub fn are_live_diffs_enabled(&self, cx: &AppContext) -> bool {
        cx.is_staff() || self.enable_experimental_live_diffs
    }
}

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}

impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl Default for AssistantSettingsContent {
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}

impl AssistantSettingsContent {
    pub fn is_version_outdated(&self) -> bool {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            AssistantSettingsContent::Legacy(_) => true,
        }
    }

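    /// Migrates provider-specific configuration (API URL, available models) out
    /// of the old V1 assistant settings into the per-provider
    /// `AllLanguageModelSettings`, then rewrites `self` as the current V2 shape.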
    pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
        if let AssistantSettingsContent::Versioned(settings) = self {
            if let VersionedAssistantSettingsContent::V1(settings) = settings {
                if let Some(provider) = settings.provider.clone() {
                    match provider {
                        AssistantProviderContentV1::Anthropic { api_url, .. } => {
                            update_settings_file::<AllLanguageModelSettings>(
                                fs,
                                cx,
                                move |content, _| {
                                    if content.anthropic.is_none() {
                                        content.anthropic =
                                            Some(AnthropicSettingsContent::Versioned(
                                                VersionedAnthropicSettingsContent::V1(
                                                    AnthropicSettingsContentV1 {
                                                        api_url,
                                                        available_models: None,
                                                    },
                                                ),
                                            ));
                                    }
                                },
                            )
                        }
                        AssistantProviderContentV1::Ollama { api_url, .. } => {
                            update_settings_file::<AllLanguageModelSettings>(
                                fs,
                                cx,
                                move |content, _| {
                                    if content.ollama.is_none() {
                                        content.ollama = Some(OllamaSettingsContent {
                                            api_url,
                                            available_models: None,
                                        });
                                    }
                                },
                            )
                        }
                        AssistantProviderContentV1::OpenAi {
                            api_url,
                            available_models,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.openai.is_none() {
                                    let available_models = available_models.map(|models| {
                                        models
                                            .into_iter()
                                            .filter_map(|model| match model {
                                                OpenAiModel::Custom {
                                                    name,
                                                    display_name,
                                                    max_tokens,
                                                    max_output_tokens,
                                                    max_completion_tokens,
                                                } => Some(open_ai::AvailableModel {
                                                    name,
                                                    display_name,
                                                    max_tokens,
                                                    max_output_tokens,
                                                    max_completion_tokens,
                                                }),
                                                _ => None,
                                            })
                                            .collect::<Vec<_>>()
                                    });
                                    content.openai = Some(OpenAiSettingsContent::Versioned(
                                        VersionedOpenAiSettingsContent::V1(
                                            OpenAiSettingsContentV1 {
                                                api_url,
                                                available_models,
                                            },
                                        ),
                                    ));
                                }
                            },
                        ),
                        _ => {}
                    }
                }
            }
        }

        *self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
            self.upgrade(),
        ));
    }

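    /// Converts any stored shape (legacy or V1) into `AssistantSettingsContentV2`,
    /// carrying over the fields that translate directly and defaulting the rest.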
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    show_hints: None,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                    inline_alternatives: None,
                    enable_experimental_live_diffs: None,
                },
                VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
            },
            AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
                enabled: None,
                show_hints: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".to_string(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
                inline_alternatives: None,
                enable_experimental_live_diffs: None,
            },
        }
    }

    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.dock = Some(dock);
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                settings.dock = Some(dock);
            }
        }
    }

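    /// Records `language_model` as the default model, preserving any
    /// provider-specific fields (such as `api_url`) already present in V1 settings.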
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
                    "zed.dev" => {
                        log::warn!("attempted to set zed.dev model on outdated settings");
                    }
                    "anthropic" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Anthropic {
                            default_model: AnthropicModel::from_id(&model).ok(),
                            api_url,
                        });
                    }
                    "ollama" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Ollama {
                            default_model: Some(ollama::Model::new(&model, None, None)),
                            api_url,
                        });
                    }
                    "openai" => {
                        let (api_url, available_models) = match &settings.provider {
                            Some(AssistantProviderContentV1::OpenAi {
                                api_url,
                                available_models,
                                ..
                            }) => (api_url.clone(), available_models.clone()),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::OpenAi {
                            default_model: OpenAiModel::from_id(&model).ok(),
                            api_url,
                            available_models,
                        });
                    }
                    _ => {}
                },
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.default_model = Some(LanguageModelSelection { provider, model });
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
        }
    }
}

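/// The versioned assistant settings, discriminated by the `"version"` string
/// in the serialized form (`"1"` or `"2"`).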
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            show_hints: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show inline hints displaying the keybindings for the inline
    /// assistant and the assistant panel.
    ///
    /// Default: true
    show_hints: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
}

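/// A provider/model pair as written in settings, e.g.
/// `{ "provider": "zed.dev", "model": "claude-3-5-sonnet" }` (the pairing
/// exercised by the test at the bottom of this file).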
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}

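/// Restricts the JSON schema for `LanguageModelSelection::provider` to the
/// known provider identifiers.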
fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
    schemars::schema::SchemaObject {
        enum_values: Some(vec![
            "anthropic".into(),
            "google".into(),
            "ollama".into(),
            "openai".into(),
            "zed.dev".into(),
            "copilot_chat".into(),
        ]),
        ..Default::default()
    }
    .into()
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", or "zed.dev", each with
    /// its respective default models and configuration.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}

impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant");

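    // Keep the "version" key when the settings file is rewritten, so the stored
    // value continues to deserialize as the versioned form.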
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.show_hints, value.show_hints);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
        }

        Ok(settings)
    }
}

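/// Overwrites `target` only when a settings layer actually provides a value,
/// leaving the previously merged default in place otherwise.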
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            show_hints: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
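
    // A minimal additional check (a sketch relying only on items defined in this
    // file): upgrading legacy, unversioned settings should select the "openai"
    // provider with the legacy default model's id.
    #[test]
    fn test_upgrade_legacy_settings_selects_openai() {
        let legacy = AssistantSettingsContent::Legacy(LegacyAssistantSettingsContent {
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_open_ai_model: None,
            openai_api_url: None,
        });
        let upgraded = legacy.upgrade();
        assert_eq!(
            upgraded.default_model.map(|selection| selection.provider),
            Some("openai".to_string())
        );
    }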
}