1mod agent_profile;
2
3use std::sync::Arc;
4
5use ::open_ai::Model as OpenAiModel;
6use anthropic::Model as AnthropicModel;
7use anyhow::{Result, bail};
8use deepseek::Model as DeepseekModel;
9use feature_flags::{Assistant2FeatureFlag, FeatureFlagAppExt};
10use gpui::{App, Pixels};
11use indexmap::IndexMap;
12use language_model::{CloudModel, LanguageModel};
13use lmstudio::Model as LmStudioModel;
14use ollama::Model as OllamaModel;
15use schemars::{JsonSchema, schema::Schema};
16use serde::{Deserialize, Serialize};
17use settings::{Settings, SettingsSources};
18
19pub use crate::agent_profile::*;
20
/// Where the assistant panel is docked in the workspace.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    /// Docked to the right edge by default.
    #[default]
    Right,
    Bottom,
}
29
/// Which screens show a popup notification when the agent is waiting for
/// user input (see `notify_when_agent_waiting` in the settings content).
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    /// Notify only on the primary screen (the default).
    #[default]
    PrimaryScreen,
    /// Notify on every screen.
    AllScreens,
    /// Never show the notification.
    Never,
}
38
/// Provider configuration as persisted by version 1 of the assistant
/// settings. Serialized with a `name` tag selecting the provider; each
/// variant carries that provider's default model plus provider-specific
/// connection options.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    /// The `zed.dev` hosted provider.
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    /// OpenAI (or an endpoint reachable at a custom `api_url`).
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        // Extra model entries carried alongside the default; preserved as-is
        // when the default model is changed (see `set_model`).
        available_models: Option<Vec<OpenAiModel>>,
    },
    /// Anthropic.
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    /// Ollama.
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    /// LM Studio.
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    /// DeepSeek.
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}
71
/// Fully-resolved assistant settings, produced by merging every settings
/// source in the `Settings::load` implementation below.
#[derive(Clone, Debug, Default)]
pub struct AssistantSettings {
    /// Whether the assistant is enabled.
    pub enabled: bool,
    /// Whether to show the assistant button in the status bar.
    pub button: bool,
    /// Where the assistant panel is docked.
    pub dock: AssistantDockPosition,
    /// Panel width when docked to the left or right.
    pub default_width: Pixels,
    /// Panel height when docked to the bottom.
    pub default_height: Pixels,
    /// The model used when no feature-specific override is configured.
    pub default_model: LanguageModelSelection,
    /// Override model for inline assists; `None` means use `default_model`.
    pub inline_assistant_model: Option<LanguageModelSelection>,
    /// Override model for commit-message generation; `None` means use `default_model`.
    pub commit_message_model: Option<LanguageModelSelection>,
    /// Override model for thread summaries; `None` means use `default_model`.
    pub thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models used to generate inline-assist alternatives.
    pub inline_alternatives: Vec<LanguageModelSelection>,
    /// True when any settings source was written in a pre-V2 format.
    pub using_outdated_settings_version: bool,
    /// Opt-in for experimental live diffs (see `are_live_diffs_enabled`).
    pub enable_experimental_live_diffs: bool,
    /// The profile selected by default in the agent.
    pub default_profile: AgentProfileId,
    /// All available agent profiles, keyed by id.
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    /// Skip confirmation prompts before running tool actions.
    pub always_allow_tool_actions: bool,
    /// Where to surface a notification while the agent waits on the user.
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
}
91
92impl AssistantSettings {
93 pub fn are_live_diffs_enabled(&self, cx: &App) -> bool {
94 if cx.has_flag::<Assistant2FeatureFlag>() {
95 return false;
96 }
97
98 cx.is_staff() || self.enable_experimental_live_diffs
99 }
100
101 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
102 self.inline_assistant_model = Some(LanguageModelSelection { provider, model });
103 }
104
105 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
106 self.commit_message_model = Some(LanguageModelSelection { provider, model });
107 }
108
109 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
110 self.thread_summary_model = Some(LanguageModelSelection { provider, model });
111 }
112}
113
/// Assistant panel settings
///
/// On-disk representation: may hold a versioned (V1/V2) payload, the
/// pre-versioned legacy layout, or nothing at all. The payload is flattened
/// into the surrounding JSON object.
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}
120
/// The two serialized layouts `AssistantSettingsContent` can hold. With
/// `untagged`, serde tries the `version`-tagged form first and falls back
/// to the legacy layout.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    /// A payload carrying an explicit `"version"` field (boxed to keep this
    /// enum small).
    Versioned(Box<VersionedAssistantSettingsContent>),
    /// The original, unversioned layout.
    Legacy(LegacyAssistantSettingsContent),
}
127
128impl AssistantSettingsContentInner {
129 fn for_v2(content: AssistantSettingsContentV2) -> Self {
130 AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
131 content,
132 )))
133 }
134}
135
// The JSON schema is delegated wholesale to `VersionedAssistantSettingsContent`,
// since the flattened `inner` wrapper is an implementation detail.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
149
150impl AssistantSettingsContent {
151 pub fn is_version_outdated(&self) -> bool {
152 match &self.inner {
153 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
154 VersionedAssistantSettingsContent::V1(_) => true,
155 VersionedAssistantSettingsContent::V2(_) => false,
156 },
157 Some(AssistantSettingsContentInner::Legacy(_)) => true,
158 None => false,
159 }
160 }
161
162 fn upgrade(&self) -> AssistantSettingsContentV2 {
163 match &self.inner {
164 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
165 VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
166 enabled: settings.enabled,
167 button: settings.button,
168 dock: settings.dock,
169 default_width: settings.default_width,
170 default_height: settings.default_width,
171 default_model: settings
172 .provider
173 .clone()
174 .and_then(|provider| match provider {
175 AssistantProviderContentV1::ZedDotDev { default_model } => {
176 default_model.map(|model| LanguageModelSelection {
177 provider: "zed.dev".to_string(),
178 model: model.id().to_string(),
179 })
180 }
181 AssistantProviderContentV1::OpenAi { default_model, .. } => {
182 default_model.map(|model| LanguageModelSelection {
183 provider: "openai".to_string(),
184 model: model.id().to_string(),
185 })
186 }
187 AssistantProviderContentV1::Anthropic { default_model, .. } => {
188 default_model.map(|model| LanguageModelSelection {
189 provider: "anthropic".to_string(),
190 model: model.id().to_string(),
191 })
192 }
193 AssistantProviderContentV1::Ollama { default_model, .. } => {
194 default_model.map(|model| LanguageModelSelection {
195 provider: "ollama".to_string(),
196 model: model.id().to_string(),
197 })
198 }
199 AssistantProviderContentV1::LmStudio { default_model, .. } => {
200 default_model.map(|model| LanguageModelSelection {
201 provider: "lmstudio".to_string(),
202 model: model.id().to_string(),
203 })
204 }
205 AssistantProviderContentV1::DeepSeek { default_model, .. } => {
206 default_model.map(|model| LanguageModelSelection {
207 provider: "deepseek".to_string(),
208 model: model.id().to_string(),
209 })
210 }
211 }),
212 inline_assistant_model: None,
213 commit_message_model: None,
214 thread_summary_model: None,
215 inline_alternatives: None,
216 enable_experimental_live_diffs: None,
217 default_profile: None,
218 profiles: None,
219 always_allow_tool_actions: None,
220 notify_when_agent_waiting: None,
221 },
222 VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
223 },
224 Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
225 enabled: None,
226 button: settings.button,
227 dock: settings.dock,
228 default_width: settings.default_width,
229 default_height: settings.default_height,
230 default_model: Some(LanguageModelSelection {
231 provider: "openai".to_string(),
232 model: settings
233 .default_open_ai_model
234 .clone()
235 .unwrap_or_default()
236 .id()
237 .to_string(),
238 }),
239 inline_assistant_model: None,
240 commit_message_model: None,
241 thread_summary_model: None,
242 inline_alternatives: None,
243 enable_experimental_live_diffs: None,
244 default_profile: None,
245 profiles: None,
246 always_allow_tool_actions: None,
247 notify_when_agent_waiting: None,
248 },
249 None => AssistantSettingsContentV2::default(),
250 }
251 }
252
253 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
254 match &mut self.inner {
255 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
256 VersionedAssistantSettingsContent::V1(ref mut settings) => {
257 settings.dock = Some(dock);
258 }
259 VersionedAssistantSettingsContent::V2(ref mut settings) => {
260 settings.dock = Some(dock);
261 }
262 },
263 Some(AssistantSettingsContentInner::Legacy(settings)) => {
264 settings.dock = Some(dock);
265 }
266 None => {
267 self.inner = Some(AssistantSettingsContentInner::for_v2(
268 AssistantSettingsContentV2 {
269 dock: Some(dock),
270 ..Default::default()
271 },
272 ))
273 }
274 }
275 }
276
277 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
278 let model = language_model.id().0.to_string();
279 let provider = language_model.provider_id().0.to_string();
280
281 match &mut self.inner {
282 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
283 VersionedAssistantSettingsContent::V1(ref mut settings) => {
284 match provider.as_ref() {
285 "zed.dev" => {
286 log::warn!("attempted to set zed.dev model on outdated settings");
287 }
288 "anthropic" => {
289 let api_url = match &settings.provider {
290 Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
291 api_url.clone()
292 }
293 _ => None,
294 };
295 settings.provider = Some(AssistantProviderContentV1::Anthropic {
296 default_model: AnthropicModel::from_id(&model).ok(),
297 api_url,
298 });
299 }
300 "ollama" => {
301 let api_url = match &settings.provider {
302 Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
303 api_url.clone()
304 }
305 _ => None,
306 };
307 settings.provider = Some(AssistantProviderContentV1::Ollama {
308 default_model: Some(ollama::Model::new(&model, None, None)),
309 api_url,
310 });
311 }
312 "lmstudio" => {
313 let api_url = match &settings.provider {
314 Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
315 api_url.clone()
316 }
317 _ => None,
318 };
319 settings.provider = Some(AssistantProviderContentV1::LmStudio {
320 default_model: Some(lmstudio::Model::new(&model, None, None)),
321 api_url,
322 });
323 }
324 "openai" => {
325 let (api_url, available_models) = match &settings.provider {
326 Some(AssistantProviderContentV1::OpenAi {
327 api_url,
328 available_models,
329 ..
330 }) => (api_url.clone(), available_models.clone()),
331 _ => (None, None),
332 };
333 settings.provider = Some(AssistantProviderContentV1::OpenAi {
334 default_model: OpenAiModel::from_id(&model).ok(),
335 api_url,
336 available_models,
337 });
338 }
339 "deepseek" => {
340 let api_url = match &settings.provider {
341 Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
342 api_url.clone()
343 }
344 _ => None,
345 };
346 settings.provider = Some(AssistantProviderContentV1::DeepSeek {
347 default_model: DeepseekModel::from_id(&model).ok(),
348 api_url,
349 });
350 }
351 _ => {}
352 }
353 }
354 VersionedAssistantSettingsContent::V2(ref mut settings) => {
355 settings.default_model = Some(LanguageModelSelection { provider, model });
356 }
357 },
358 Some(AssistantSettingsContentInner::Legacy(settings)) => {
359 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
360 settings.default_open_ai_model = Some(model);
361 }
362 }
363 None => {
364 self.inner = Some(AssistantSettingsContentInner::for_v2(
365 AssistantSettingsContentV2 {
366 default_model: Some(LanguageModelSelection { provider, model }),
367 ..Default::default()
368 },
369 ));
370 }
371 }
372 }
373
374 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
375 self.v2_setting(|setting| {
376 setting.inline_assistant_model = Some(LanguageModelSelection { provider, model });
377 Ok(())
378 })
379 .ok();
380 }
381
382 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
383 self.v2_setting(|setting| {
384 setting.commit_message_model = Some(LanguageModelSelection { provider, model });
385 Ok(())
386 })
387 .ok();
388 }
389
390 pub fn v2_setting(
391 &mut self,
392 f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
393 ) -> anyhow::Result<()> {
394 match self.inner.get_or_insert_with(|| {
395 AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
396 ..Default::default()
397 })
398 }) {
399 AssistantSettingsContentInner::Versioned(boxed) => {
400 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
401 f(settings)
402 } else {
403 Ok(())
404 }
405 }
406 _ => Ok(()),
407 }
408 }
409
410 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
411 self.v2_setting(|setting| {
412 setting.thread_summary_model = Some(LanguageModelSelection { provider, model });
413 Ok(())
414 })
415 .ok();
416 }
417
418 pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
419 self.v2_setting(|setting| {
420 setting.always_allow_tool_actions = Some(allow);
421 Ok(())
422 })
423 .ok();
424 }
425
426 pub fn set_profile(&mut self, profile_id: AgentProfileId) {
427 self.v2_setting(|setting| {
428 setting.default_profile = Some(profile_id);
429 Ok(())
430 })
431 .ok();
432 }
433
434 pub fn create_profile(
435 &mut self,
436 profile_id: AgentProfileId,
437 profile: AgentProfile,
438 ) -> Result<()> {
439 self.v2_setting(|settings| {
440 let profiles = settings.profiles.get_or_insert_default();
441 if profiles.contains_key(&profile_id) {
442 bail!("profile with ID '{profile_id}' already exists");
443 }
444
445 profiles.insert(
446 profile_id,
447 AgentProfileContent {
448 name: profile.name.into(),
449 tools: profile.tools,
450 enable_all_context_servers: Some(profile.enable_all_context_servers),
451 context_servers: profile
452 .context_servers
453 .into_iter()
454 .map(|(server_id, preset)| {
455 (
456 server_id,
457 ContextServerPresetContent {
458 tools: preset.tools,
459 },
460 )
461 })
462 .collect(),
463 },
464 );
465
466 Ok(())
467 })
468 }
469}
470
/// Settings payload tagged with an explicit `"version"` field, letting the
/// format evolve while older files stay readable (see `upgrade`).
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    /// `"version": "1"` — provider-specific model configuration.
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    /// `"version": "2"` — the current format.
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
479
480impl Default for VersionedAssistantSettingsContent {
481 fn default() -> Self {
482 Self::V2(AssistantSettingsContentV2 {
483 enabled: None,
484 button: None,
485 dock: None,
486 default_width: None,
487 default_height: None,
488 default_model: None,
489 inline_assistant_model: None,
490 commit_message_model: None,
491 thread_summary_model: None,
492 inline_alternatives: None,
493 enable_experimental_live_diffs: None,
494 default_profile: None,
495 profiles: None,
496 always_allow_tool_actions: None,
497 notify_when_agent_waiting: None,
498 })
499 }
500}
501
/// Version 2 of the assistant settings format — the shape currently written
/// to settings files.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
}
554
/// A (provider, model) pair identifying a language model.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    /// Provider id (e.g. "openai", "zed.dev"); `providers_schema` supplies
    /// the known values for settings-file completion.
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    /// Provider-specific model id.
    pub model: String,
}
561
562fn providers_schema(_: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
563 schemars::schema::SchemaObject {
564 enum_values: Some(vec![
565 "anthropic".into(),
566 "bedrock".into(),
567 "google".into(),
568 "lmstudio".into(),
569 "ollama".into(),
570 "openai".into(),
571 "zed.dev".into(),
572 "copilot_chat".into(),
573 "deepseek".into(),
574 ]),
575 ..Default::default()
576 }
577 .into()
578}
579
580impl Default for LanguageModelSelection {
581 fn default() -> Self {
582 Self {
583 provider: "openai".to_string(),
584 model: "gpt-4".to_string(),
585 }
586 }
587}
588
/// Serialized form of an agent profile as written in the settings file.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    // Human-readable profile name.
    pub name: Arc<str>,
    // Enabled/disabled flags for individual tools.
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    // Per-context-server tool presets, keyed by server id.
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
599
/// Serialized tool preset for a single context server within a profile.
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    // Enabled/disabled flags for the server's tools.
    pub tools: IndexMap<Arc<str>, bool>,
}
604
/// Version 1 of the assistant settings format, kept so old settings files
/// can still be read and upgraded (see `AssistantSettingsContent::upgrade`).
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
633
/// The original, unversioned assistant settings layout. It only knew about
/// OpenAI; kept for reading and upgrading very old settings files.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}
661
impl Settings for AssistantSettings {
    /// Settings-file key these settings are read from.
    const KEY: Option<&'static str> = Some("assistant");

    /// Keep the "version" key intact when the settings file is rewritten so
    /// the serialized format version is never dropped.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Merges all settings sources into one resolved `AssistantSettings`.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Record if any source is still in a pre-V2 format.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            // Normalize each source to the V2 shape before merging. Later
            // sources win per-field: `merge` overwrites the target whenever
            // the incoming value is `Some`.
            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            // Optional per-feature overrides: a later `Some` replaces, a
            // later `None` keeps whatever was already accumulated.
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.default_profile, value.default_profile);

            // Profiles merge by id: later sources replace same-id profiles
            // and add new ones.
            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    /// Imports VS Code's `chat.agent.enabled` flag, mapping it onto our
    /// `enabled`/`button` fields (legacy settings only have `button`).
    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    // No settings yet: start a fresh V2 payload.
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}
783
/// Overwrites `target` with `value` when a value is present; otherwise
/// leaves `target` untouched. Used to layer settings sources.
fn merge<T>(target: &mut T, value: Option<T>) {
    match value {
        Some(value) => *target = value,
        None => {}
    }
}
789
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    // Round-trips a V2 settings payload through the settings store and a
    // fake filesystem, verifying the "version" tag is preserved on disk.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        // Fake filesystem backing the settings file.
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // Defaults are up to date and select zed.dev's default model.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        // Write a V2 payload with a custom default model to the settings file.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                enable_experimental_live_diffs: None,
                                default_profile: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        // The serialized file must carry the version tag...
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        // ...and deserialize back as a current (non-outdated) format.
        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}
868}