1mod agent_profile;
2
3use std::sync::Arc;
4
5use ::open_ai::Model as OpenAiModel;
6use anthropic::Model as AnthropicModel;
7use anyhow::{Result, bail};
8use deepseek::Model as DeepseekModel;
9use feature_flags::{AgentStreamEditsFeatureFlag, Assistant2FeatureFlag, FeatureFlagAppExt};
10use gpui::{App, Pixels};
11use indexmap::IndexMap;
12use language_model::{CloudModel, LanguageModel};
13use lmstudio::Model as LmStudioModel;
14use ollama::Model as OllamaModel;
15use schemars::{JsonSchema, schema::Schema};
16use serde::{Deserialize, Serialize};
17use settings::{Settings, SettingsSources};
18
19pub use crate::agent_profile::*;
20
/// Which edge of the workspace the assistant panel is docked to.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}
29
/// Where to surface a popup notification when the agent is blocked waiting
/// for user input.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    #[default]
    PrimaryScreen,
    AllScreens,
    Never,
}
38
/// Provider configuration as stored by the V1 settings schema.
///
/// Serialized with an internal `name` tag, so on disk this looks like
/// `{ "name": "openai", "default_model": ..., ... }`. Each variant carries
/// the provider-specific model type plus optional connection details.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}
71
/// Fully-resolved assistant settings after merging defaults and user
/// customizations (see the `Settings::load` impl below).
#[derive(Clone, Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    /// Model used when no feature-specific model below is configured.
    pub default_model: LanguageModelSelection,
    pub inline_assistant_model: Option<LanguageModelSelection>,
    pub commit_message_model: Option<LanguageModelSelection>,
    pub thread_summary_model: Option<LanguageModelSelection>,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    // Set during load when any settings source still uses a pre-V2 schema.
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
    pub default_profile: AgentProfileId,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    pub stream_edits: bool,
}
92
93impl AssistantSettings {
94 pub fn stream_edits(&self, cx: &App) -> bool {
95 cx.has_flag::<AgentStreamEditsFeatureFlag>() || self.stream_edits
96 }
97
98 pub fn are_live_diffs_enabled(&self, cx: &App) -> bool {
99 if cx.has_flag::<Assistant2FeatureFlag>() {
100 return false;
101 }
102
103 cx.is_staff() || self.enable_experimental_live_diffs
104 }
105
106 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
107 self.inline_assistant_model = Some(LanguageModelSelection { provider, model });
108 }
109
110 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
111 self.commit_message_model = Some(LanguageModelSelection { provider, model });
112 }
113
114 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
115 self.thread_summary_model = Some(LanguageModelSelection { provider, model });
116 }
117}
118
/// Assistant panel settings
///
/// `inner` is flattened, so both the versioned (`"version": "1"/"2"`) and
/// the legacy tagless on-disk formats deserialize into this one wrapper.
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}
125
/// Union of all supported on-disk settings formats. `untagged` means serde
/// tries `Versioned` first (which requires a `version` key) and falls back
/// to the legacy shape.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    // Boxed to keep this enum small; the versioned payload is large.
    Versioned(Box<VersionedAssistantSettingsContent>),
    Legacy(LegacyAssistantSettingsContent),
}
132
133impl AssistantSettingsContentInner {
134 fn for_v2(content: AssistantSettingsContentV2) -> Self {
135 AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
136 content,
137 )))
138 }
139}
140
// The wrapper is schema-transparent: since `inner` is flattened during
// (de)serialization, the JSON schema is exactly that of the versioned
// content, so all three methods delegate.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
154
155impl AssistantSettingsContent {
156 pub fn is_version_outdated(&self) -> bool {
157 match &self.inner {
158 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
159 VersionedAssistantSettingsContent::V1(_) => true,
160 VersionedAssistantSettingsContent::V2(_) => false,
161 },
162 Some(AssistantSettingsContentInner::Legacy(_)) => true,
163 None => false,
164 }
165 }
166
167 fn upgrade(&self) -> AssistantSettingsContentV2 {
168 match &self.inner {
169 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
170 VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
171 enabled: settings.enabled,
172 button: settings.button,
173 dock: settings.dock,
174 default_width: settings.default_width,
175 default_height: settings.default_width,
176 default_model: settings
177 .provider
178 .clone()
179 .and_then(|provider| match provider {
180 AssistantProviderContentV1::ZedDotDev { default_model } => {
181 default_model.map(|model| LanguageModelSelection {
182 provider: "zed.dev".to_string(),
183 model: model.id().to_string(),
184 })
185 }
186 AssistantProviderContentV1::OpenAi { default_model, .. } => {
187 default_model.map(|model| LanguageModelSelection {
188 provider: "openai".to_string(),
189 model: model.id().to_string(),
190 })
191 }
192 AssistantProviderContentV1::Anthropic { default_model, .. } => {
193 default_model.map(|model| LanguageModelSelection {
194 provider: "anthropic".to_string(),
195 model: model.id().to_string(),
196 })
197 }
198 AssistantProviderContentV1::Ollama { default_model, .. } => {
199 default_model.map(|model| LanguageModelSelection {
200 provider: "ollama".to_string(),
201 model: model.id().to_string(),
202 })
203 }
204 AssistantProviderContentV1::LmStudio { default_model, .. } => {
205 default_model.map(|model| LanguageModelSelection {
206 provider: "lmstudio".to_string(),
207 model: model.id().to_string(),
208 })
209 }
210 AssistantProviderContentV1::DeepSeek { default_model, .. } => {
211 default_model.map(|model| LanguageModelSelection {
212 provider: "deepseek".to_string(),
213 model: model.id().to_string(),
214 })
215 }
216 }),
217 inline_assistant_model: None,
218 commit_message_model: None,
219 thread_summary_model: None,
220 inline_alternatives: None,
221 enable_experimental_live_diffs: None,
222 default_profile: None,
223 profiles: None,
224 always_allow_tool_actions: None,
225 notify_when_agent_waiting: None,
226 stream_edits: None,
227 },
228 VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
229 },
230 Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
231 enabled: None,
232 button: settings.button,
233 dock: settings.dock,
234 default_width: settings.default_width,
235 default_height: settings.default_height,
236 default_model: Some(LanguageModelSelection {
237 provider: "openai".to_string(),
238 model: settings
239 .default_open_ai_model
240 .clone()
241 .unwrap_or_default()
242 .id()
243 .to_string(),
244 }),
245 inline_assistant_model: None,
246 commit_message_model: None,
247 thread_summary_model: None,
248 inline_alternatives: None,
249 enable_experimental_live_diffs: None,
250 default_profile: None,
251 profiles: None,
252 always_allow_tool_actions: None,
253 notify_when_agent_waiting: None,
254 stream_edits: None,
255 },
256 None => AssistantSettingsContentV2::default(),
257 }
258 }
259
260 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
261 match &mut self.inner {
262 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
263 VersionedAssistantSettingsContent::V1(ref mut settings) => {
264 settings.dock = Some(dock);
265 }
266 VersionedAssistantSettingsContent::V2(ref mut settings) => {
267 settings.dock = Some(dock);
268 }
269 },
270 Some(AssistantSettingsContentInner::Legacy(settings)) => {
271 settings.dock = Some(dock);
272 }
273 None => {
274 self.inner = Some(AssistantSettingsContentInner::for_v2(
275 AssistantSettingsContentV2 {
276 dock: Some(dock),
277 ..Default::default()
278 },
279 ))
280 }
281 }
282 }
283
284 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
285 let model = language_model.id().0.to_string();
286 let provider = language_model.provider_id().0.to_string();
287
288 match &mut self.inner {
289 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
290 VersionedAssistantSettingsContent::V1(ref mut settings) => {
291 match provider.as_ref() {
292 "zed.dev" => {
293 log::warn!("attempted to set zed.dev model on outdated settings");
294 }
295 "anthropic" => {
296 let api_url = match &settings.provider {
297 Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
298 api_url.clone()
299 }
300 _ => None,
301 };
302 settings.provider = Some(AssistantProviderContentV1::Anthropic {
303 default_model: AnthropicModel::from_id(&model).ok(),
304 api_url,
305 });
306 }
307 "ollama" => {
308 let api_url = match &settings.provider {
309 Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
310 api_url.clone()
311 }
312 _ => None,
313 };
314 settings.provider = Some(AssistantProviderContentV1::Ollama {
315 default_model: Some(ollama::Model::new(&model, None, None)),
316 api_url,
317 });
318 }
319 "lmstudio" => {
320 let api_url = match &settings.provider {
321 Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
322 api_url.clone()
323 }
324 _ => None,
325 };
326 settings.provider = Some(AssistantProviderContentV1::LmStudio {
327 default_model: Some(lmstudio::Model::new(&model, None, None)),
328 api_url,
329 });
330 }
331 "openai" => {
332 let (api_url, available_models) = match &settings.provider {
333 Some(AssistantProviderContentV1::OpenAi {
334 api_url,
335 available_models,
336 ..
337 }) => (api_url.clone(), available_models.clone()),
338 _ => (None, None),
339 };
340 settings.provider = Some(AssistantProviderContentV1::OpenAi {
341 default_model: OpenAiModel::from_id(&model).ok(),
342 api_url,
343 available_models,
344 });
345 }
346 "deepseek" => {
347 let api_url = match &settings.provider {
348 Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
349 api_url.clone()
350 }
351 _ => None,
352 };
353 settings.provider = Some(AssistantProviderContentV1::DeepSeek {
354 default_model: DeepseekModel::from_id(&model).ok(),
355 api_url,
356 });
357 }
358 _ => {}
359 }
360 }
361 VersionedAssistantSettingsContent::V2(ref mut settings) => {
362 settings.default_model = Some(LanguageModelSelection { provider, model });
363 }
364 },
365 Some(AssistantSettingsContentInner::Legacy(settings)) => {
366 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
367 settings.default_open_ai_model = Some(model);
368 }
369 }
370 None => {
371 self.inner = Some(AssistantSettingsContentInner::for_v2(
372 AssistantSettingsContentV2 {
373 default_model: Some(LanguageModelSelection { provider, model }),
374 ..Default::default()
375 },
376 ));
377 }
378 }
379 }
380
381 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
382 self.v2_setting(|setting| {
383 setting.inline_assistant_model = Some(LanguageModelSelection { provider, model });
384 Ok(())
385 })
386 .ok();
387 }
388
389 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
390 self.v2_setting(|setting| {
391 setting.commit_message_model = Some(LanguageModelSelection { provider, model });
392 Ok(())
393 })
394 .ok();
395 }
396
397 pub fn v2_setting(
398 &mut self,
399 f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
400 ) -> anyhow::Result<()> {
401 match self.inner.get_or_insert_with(|| {
402 AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
403 ..Default::default()
404 })
405 }) {
406 AssistantSettingsContentInner::Versioned(boxed) => {
407 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
408 f(settings)
409 } else {
410 Ok(())
411 }
412 }
413 _ => Ok(()),
414 }
415 }
416
417 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
418 self.v2_setting(|setting| {
419 setting.thread_summary_model = Some(LanguageModelSelection { provider, model });
420 Ok(())
421 })
422 .ok();
423 }
424
425 pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
426 self.v2_setting(|setting| {
427 setting.always_allow_tool_actions = Some(allow);
428 Ok(())
429 })
430 .ok();
431 }
432
433 pub fn set_profile(&mut self, profile_id: AgentProfileId) {
434 self.v2_setting(|setting| {
435 setting.default_profile = Some(profile_id);
436 Ok(())
437 })
438 .ok();
439 }
440
441 pub fn create_profile(
442 &mut self,
443 profile_id: AgentProfileId,
444 profile: AgentProfile,
445 ) -> Result<()> {
446 self.v2_setting(|settings| {
447 let profiles = settings.profiles.get_or_insert_default();
448 if profiles.contains_key(&profile_id) {
449 bail!("profile with ID '{profile_id}' already exists");
450 }
451
452 profiles.insert(
453 profile_id,
454 AgentProfileContent {
455 name: profile.name.into(),
456 tools: profile.tools,
457 enable_all_context_servers: Some(profile.enable_all_context_servers),
458 context_servers: profile
459 .context_servers
460 .into_iter()
461 .map(|(server_id, preset)| {
462 (
463 server_id,
464 ContextServerPresetContent {
465 tools: preset.tools,
466 },
467 )
468 })
469 .collect(),
470 },
471 );
472
473 Ok(())
474 })
475 }
476}
477
/// Settings payload tagged by schema version; `"version": "1"` or
/// `"version": "2"` on disk selects the variant.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
486
487impl Default for VersionedAssistantSettingsContent {
488 fn default() -> Self {
489 Self::V2(AssistantSettingsContentV2 {
490 enabled: None,
491 button: None,
492 dock: None,
493 default_width: None,
494 default_height: None,
495 default_model: None,
496 inline_assistant_model: None,
497 commit_message_model: None,
498 thread_summary_model: None,
499 inline_alternatives: None,
500 enable_experimental_live_diffs: None,
501 default_profile: None,
502 profiles: None,
503 always_allow_tool_actions: None,
504 notify_when_agent_waiting: None,
505 stream_edits: None,
506 })
507 }
508}
509
/// The current (V2) settings schema. All fields are optional so that user
/// files can override defaults selectively; merging happens in
/// `Settings::load`.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    // NOTE: the only `pub` field; profile CRUD elsewhere goes through
    // `v2_setting`/`create_profile`.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
}
566
/// A provider/model pair identifying a language model.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    /// Provider id (e.g. "openai", "zed.dev"); the JSON schema for this
    /// field is constrained to known providers by `providers_schema`.
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}
573
574fn providers_schema(_: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
575 schemars::schema::SchemaObject {
576 enum_values: Some(vec![
577 "anthropic".into(),
578 "bedrock".into(),
579 "google".into(),
580 "lmstudio".into(),
581 "ollama".into(),
582 "openai".into(),
583 "zed.dev".into(),
584 "copilot_chat".into(),
585 "deepseek".into(),
586 ]),
587 ..Default::default()
588 }
589 .into()
590}
591
592impl Default for LanguageModelSelection {
593 fn default() -> Self {
594 Self {
595 provider: "openai".to_string(),
596 model: "gpt-4".to_string(),
597 }
598 }
599}
600
/// Serialized form of an agent profile (see `AgentProfile` for the
/// resolved counterpart built in `Settings::load`).
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    // Per-tool enable/disable flags keyed by tool name.
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    // Per-context-server tool presets keyed by server id.
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
611
/// Serialized tool preset for a single context server within a profile.
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}
616
/// The outdated V1 settings schema; upgraded to V2 via
/// `AssistantSettingsContent::upgrade`.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
645
/// The pre-versioning settings format (OpenAI-only); upgraded to V2 via
/// `AssistantSettingsContent::upgrade`.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}
673
impl Settings for AssistantSettings {
    // Settings live under the top-level "assistant" key.
    const KEY: Option<&'static str> = Some("assistant");

    // Keep the "version" tag when rewriting the settings file, even though
    // the resolved struct doesn't store it.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Merges defaults and user customizations, in order, into a resolved
    /// `AssistantSettings`. Later sources win; each source is first
    /// upgraded to the V2 schema.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Remember if *any* source still uses an outdated schema so the
            // UI can prompt for migration.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            // Optional overrides: keep the previous value unless this
            // source provides one (`or` + `take` avoids a clone).
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.default_profile, value.default_profile);

            // Profiles accumulate across sources; a later source's profile
            // with the same id replaces the earlier one.
            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    /// Imports VS Code's `chat.agent.enabled` flag into whichever settings
    /// format is currently present (enables/disables both the assistant and
    /// its status-bar button).
    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                // Legacy settings have no `enabled` field; only the button.
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}
796
/// Overwrites `target` when `value` is present; leaves it untouched otherwise.
fn merge<T>(target: &mut T, value: Option<T>) {
    match value {
        Some(value) => *target = value,
        None => {}
    }
}
802
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    // End-to-end check that registering the settings yields the expected
    // defaults, that writing V2 content persists a `"version": "2"` tag,
    // and that the persisted file round-trips as a non-outdated version.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // Freshly-registered settings should report the bundled defaults.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        // Overwrite the settings file with explicit V2 content.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                enable_experimental_live_diffs: None,
                                default_profile: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                            },
                        )),
                    }
                },
            );
        });

        // Let the async settings-file write complete.
        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        // The persisted V2 content must deserialize as up-to-date.
        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}