mod agent_profile;

use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use anyhow::{Result, bail};
use deepseek::Model as DeepseekModel;
use feature_flags::{AgentStreamEditsFeatureFlag, Assistant2FeatureFlag, FeatureFlagAppExt};
use gpui::{App, Pixels};
use indexmap::IndexMap;
use language_model::{CloudModel, LanguageModel};
use lmstudio::Model as LmStudioModel;
use ollama::Model as OllamaModel;
use schemars::{JsonSchema, schema::Schema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

pub use crate::agent_profile::*;

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    #[default]
    PrimaryScreen,
    AllScreens,
    Never,
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}

#[derive(Default, Clone, Debug)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub inline_assistant_model: Option<LanguageModelSelection>,
    pub commit_message_model: Option<LanguageModelSelection>,
    pub thread_summary_model: Option<LanguageModelSelection>,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
    pub default_profile: AgentProfileId,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    pub stream_edits: bool,
    pub single_file_review: bool,
    pub preferred_completion_mode: CompletionMode,
}

impl AssistantSettings {
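    /// Returns whether the agent should stream edits, either because the
    /// `AgentStreamEditsFeatureFlag` is enabled or because the user opted in
    /// via the `stream_edits` setting.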
    pub fn stream_edits(&self, cx: &App) -> bool {
        cx.has_flag::<AgentStreamEditsFeatureFlag>() || self.stream_edits
    }

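    /// Returns whether experimental live diffs are enabled. They are always
    /// disabled while the Assistant2 feature flag is active; otherwise they
    /// are available to staff or via `enable_experimental_live_diffs`.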
    pub fn are_live_diffs_enabled(&self, cx: &App) -> bool {
        if cx.has_flag::<Assistant2FeatureFlag>() {
            return false;
        }

        cx.is_staff() || self.enable_experimental_live_diffs
    }

    pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
        self.inline_assistant_model = Some(LanguageModelSelection { provider, model });
    }

    pub fn set_commit_message_model(&mut self, provider: String, model: String) {
        self.commit_message_model = Some(LanguageModelSelection { provider, model });
    }

    pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
        self.thread_summary_model = Some(LanguageModelSelection { provider, model });
    }
}

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}

#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    Versioned(Box<VersionedAssistantSettingsContent>),
    Legacy(LegacyAssistantSettingsContent),
}

impl AssistantSettingsContentInner {
    fn for_v2(content: AssistantSettingsContentV2) -> Self {
        AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
            content,
        )))
    }
}

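// The JSON schema is delegated to `VersionedAssistantSettingsContent`, so the
// flattened, optional `inner` wrapper does not appear as an extra layer in the
// generated settings schema.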
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl AssistantSettingsContent {
    pub fn is_version_outdated(&self) -> bool {
        match &self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            Some(AssistantSettingsContentInner::Legacy(_)) => true,
            None => false,
        }
    }

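    /// Converts whichever settings version was deserialized (legacy, V1, or V2)
    /// into the current `AssistantSettingsContentV2` representation.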
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match &self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::LmStudio { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "lmstudio".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::DeepSeek { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "deepseek".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                    inline_assistant_model: None,
                    commit_message_model: None,
                    thread_summary_model: None,
                    inline_alternatives: None,
                    enable_experimental_live_diffs: None,
                    default_profile: None,
                    profiles: None,
                    always_allow_tool_actions: None,
                    notify_when_agent_waiting: None,
                    stream_edits: None,
                    single_file_review: None,
                    preferred_completion_mode: None,
                },
                VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".to_string(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
                inline_assistant_model: None,
                commit_message_model: None,
                thread_summary_model: None,
                inline_alternatives: None,
                enable_experimental_live_diffs: None,
                default_profile: None,
                profiles: None,
                always_allow_tool_actions: None,
                notify_when_agent_waiting: None,
                stream_edits: None,
                single_file_review: None,
                preferred_completion_mode: None,
            },
            None => AssistantSettingsContentV2::default(),
        }
    }

    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.dock = Some(dock);
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                settings.dock = Some(dock);
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        dock: Some(dock),
                        ..Default::default()
                    },
                ))
            }
        }
    }

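    /// Records `language_model` as the default model. For V1 settings this is
    /// written into the provider-specific configuration (preserving any
    /// existing API URL); for V2 and freshly created settings it becomes
    /// `default_model`.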
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    match provider.as_ref() {
                        "zed.dev" => {
                            log::warn!("attempted to set zed.dev model on outdated settings");
                        }
                        "anthropic" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Anthropic {
                                default_model: AnthropicModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        "ollama" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Ollama {
                                default_model: Some(ollama::Model::new(
                                    &model,
                                    None,
                                    None,
                                    language_model.supports_tools(),
                                )),
                                api_url,
                            });
                        }
                        "lmstudio" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::LmStudio {
                                default_model: Some(lmstudio::Model::new(&model, None, None)),
                                api_url,
                            });
                        }
                        "openai" => {
                            let (api_url, available_models) = match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    available_models,
                                    ..
                                }) => (api_url.clone(), available_models.clone()),
                                _ => (None, None),
                            };
                            settings.provider = Some(AssistantProviderContentV1::OpenAi {
                                default_model: OpenAiModel::from_id(&model).ok(),
                                api_url,
                                available_models,
                            });
                        }
                        "deepseek" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                                default_model: DeepseekModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        _ => {}
                    }
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.default_model = Some(LanguageModelSelection { provider, model });
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        default_model: Some(LanguageModelSelection { provider, model }),
                        ..Default::default()
                    },
                ));
            }
        }
    }

    pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.inline_assistant_model = Some(LanguageModelSelection { provider, model });
            Ok(())
        })
        .ok();
    }

    pub fn set_commit_message_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.commit_message_model = Some(LanguageModelSelection { provider, model });
            Ok(())
        })
        .ok();
    }

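    /// Applies `f` to the V2 settings content, creating a default V2 entry if
    /// no settings exist yet. V1 and legacy settings are left untouched and the
    /// call succeeds as a no-op.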
    pub fn v2_setting(
        &mut self,
        f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
    ) -> anyhow::Result<()> {
        match self.inner.get_or_insert_with(|| {
            AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
                ..Default::default()
            })
        }) {
            AssistantSettingsContentInner::Versioned(boxed) => {
                if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
                    f(settings)
                } else {
                    Ok(())
                }
            }
            _ => Ok(()),
        }
    }

    pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.thread_summary_model = Some(LanguageModelSelection { provider, model });
            Ok(())
        })
        .ok();
    }

    pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
        self.v2_setting(|setting| {
            setting.always_allow_tool_actions = Some(allow);
            Ok(())
        })
        .ok();
    }

    pub fn set_single_file_review(&mut self, allow: bool) {
        self.v2_setting(|setting| {
            setting.single_file_review = Some(allow);
            Ok(())
        })
        .ok();
    }

    pub fn set_profile(&mut self, profile_id: AgentProfileId) {
        self.v2_setting(|setting| {
            setting.default_profile = Some(profile_id);
            Ok(())
        })
        .ok();
    }

    pub fn create_profile(
        &mut self,
        profile_id: AgentProfileId,
        profile: AgentProfile,
    ) -> Result<()> {
        self.v2_setting(|settings| {
            let profiles = settings.profiles.get_or_insert_default();
            if profiles.contains_key(&profile_id) {
                bail!("profile with ID '{profile_id}' already exists");
            }

            profiles.insert(
                profile_id,
                AgentProfileContent {
                    name: profile.name.into(),
                    tools: profile.tools,
                    enable_all_context_servers: Some(profile.enable_all_context_servers),
                    context_servers: profile
                        .context_servers
                        .into_iter()
                        .map(|(server_id, preset)| {
                            (
                                server_id,
                                ContextServerPresetContent {
                                    tools: preset.tools,
                                },
                            )
                        })
                        .collect(),
                },
            );

            Ok(())
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            inline_assistant_model: None,
            commit_message_model: None,
            thread_summary_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
            default_profile: None,
            profiles: None,
            always_allow_tool_actions: None,
            notify_when_agent_waiting: None,
            stream_edits: None,
            single_file_review: None,
            preferred_completion_mode: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
    /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
    ///
    /// Default: true
    single_file_review: Option<bool>,
    /// What completion mode to enable for new threads.
    ///
    /// Default: normal
    preferred_completion_mode: Option<CompletionMode>,
}

#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
    #[default]
    Normal,
    Max,
}

impl From<CompletionMode> for zed_llm_client::CompletionMode {
    fn from(value: CompletionMode) -> Self {
        match value {
            CompletionMode::Normal => zed_llm_client::CompletionMode::Normal,
            CompletionMode::Max => zed_llm_client::CompletionMode::Max,
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}

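// Restricts the `provider` field in the generated JSON schema to the set of
// known language model provider IDs.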
fn providers_schema(_: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
    schemars::schema::SchemaObject {
        enum_values: Some(vec![
            "anthropic".into(),
            "bedrock".into(),
            "google".into(),
            "lmstudio".into(),
            "ollama".into(),
            "openai".into(),
            "zed.dev".into(),
            "copilot_chat".into(),
            "deepseek".into(),
        ]),
        ..Default::default()
    }
    .into()
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}

#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", or "zed.dev",
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}

impl Settings for AssistantSettings {
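    // Settings are read from the "agent" key, falling back to the legacy
    // "assistant" key for configurations that have not been migrated yet.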
    const KEY: Option<&'static str> = Some("agent");

    const FALLBACK_KEY: Option<&'static str> = Some("assistant");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

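    // Builds the effective settings by upgrading each source to V2 and then
    // layering defaults and user customizations in order, with later sources
    // overriding earlier ones field by field.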
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.single_file_review, value.single_file_review);
            merge(&mut settings.default_profile, value.default_profile);
            merge(
                &mut settings.preferred_completion_mode,
                value.preferred_completion_mode,
            );

            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

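    // Maps VS Code's "chat.agent.enabled" setting onto the assistant's
    // `enabled` and `button` fields (only `button` for legacy settings).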
    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}

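/// Overwrites `target` with `value` when a value is present, leaving the
/// existing setting in place otherwise.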
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};
    use settings::SettingsStore;

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                enable_experimental_live_diffs: None,
                                default_profile: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                                single_file_review: None,
                                preferred_completion_mode: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            agent: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.agent.is_version_outdated());
    }

    #[gpui::test]
    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let mut test_settings = settings::SettingsStore::test(cx);
            let user_settings_content = r#"{
                "assistant": {
                    "enabled": true,
                    "version": "2",
                    "default_model": {
                        "provider": "zed.dev",
                        "model": "gpt-99"
                    },
                }
            }"#;
            test_settings
                .set_user_settings(user_settings_content, cx)
                .unwrap();
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.run_until_parked();

        let assistant_settings = cx.update(|cx| AssistantSettings::get_global(cx).clone());
        assert!(assistant_settings.enabled);
        assert!(!assistant_settings.using_outdated_settings_version);
        assert_eq!(assistant_settings.default_model.model, "gpt-99");

        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
            settings_store.update_user_settings::<AssistantSettings>(cx, |settings| {
                *settings = AssistantSettingsContent {
                    inner: Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(false),
                            default_model: Some(LanguageModelSelection {
                                provider: "xai".to_owned(),
                                model: "grok".to_owned(),
                            }),
                            ..Default::default()
                        },
                    )),
                };
            });
        });

        cx.run_until_parked();

        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
            agent: Option<serde_json_lenient::Value>,
        }

        let assistant_settings: AssistantSettingsTest = serde_json::from_value(settings).unwrap();
        assert!(assistant_settings.agent.is_none());
    }
}