1mod agent_profile;
2
3use std::sync::Arc;
4
5use ::open_ai::Model as OpenAiModel;
6use anthropic::Model as AnthropicModel;
7use anyhow::{Result, bail};
8use deepseek::Model as DeepseekModel;
9use feature_flags::{Assistant2FeatureFlag, FeatureFlagAppExt};
10use gpui::{App, Pixels};
11use indexmap::IndexMap;
12use language_model::{CloudModel, LanguageModel};
13use lmstudio::Model as LmStudioModel;
14use ollama::Model as OllamaModel;
15use schemars::{JsonSchema, schema::Schema};
16use serde::{Deserialize, Serialize};
17use settings::{Settings, SettingsSources};
18
19pub use crate::agent_profile::*;
20
/// Which edge of the workspace the assistant panel is docked to.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    /// Dock the panel on the left side.
    Left,
    /// Dock the panel on the right side (the default).
    #[default]
    Right,
    /// Dock the panel at the bottom.
    Bottom,
}
29
/// Where a popup notification is shown when the agent is waiting for user input.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    /// Notify on the primary screen only (the default).
    #[default]
    PrimaryScreen,
    /// Notify on every screen.
    AllScreens,
    /// Never show a notification.
    Never,
}
38
/// Provider configuration as written in version-1 settings files.
///
/// Serialized internally tagged by `"name"`, e.g. `{ "name": "openai", ... }`.
/// Each variant carries the provider's default model and, where applicable,
/// a custom API URL.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    /// Zed's hosted provider.
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    /// OpenAI; also allows listing extra available models.
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    /// Anthropic.
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    /// Ollama (local models).
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    /// LM Studio (local models).
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    /// DeepSeek.
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}
71
/// The resolved, effective assistant settings after all sources are merged
/// by `AssistantSettings::load`.
#[derive(Clone, Debug, Default)]
pub struct AssistantSettings {
    /// Whether the assistant is enabled.
    pub enabled: bool,
    /// Whether to show the assistant panel button in the status bar.
    pub button: bool,
    /// Which edge of the workspace the panel docks to.
    pub dock: AssistantDockPosition,
    /// Panel width (in pixels) when docked to the left or right.
    pub default_width: Pixels,
    /// Panel height (in pixels) when docked at the bottom.
    pub default_height: Pixels,
    /// Model used when no feature-specific model is configured.
    pub default_model: LanguageModelSelection,
    /// Optional override model for the inline assistant.
    pub inline_assistant_model: Option<LanguageModelSelection>,
    /// Optional override model for generating git commit messages.
    pub commit_message_model: Option<LanguageModelSelection>,
    /// Optional override model for generating thread summaries.
    pub thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models used to generate inline-assist alternatives.
    pub inline_alternatives: Vec<LanguageModelSelection>,
    /// Set during load when any settings source used an outdated schema
    /// (V1 or legacy), so the UI can surface a migration hint.
    pub using_outdated_settings_version: bool,
    /// Opt-in flag for experimental live diffs in the assistant panel.
    pub enable_experimental_live_diffs: bool,
    /// The profile selected by default in the Agent.
    pub default_profile: AgentProfileId,
    /// All configured agent profiles, keyed by profile id.
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    /// When true, tool actions that would normally prompt for confirmation
    /// are always allowed.
    pub always_allow_tool_actions: bool,
    /// Where to show a popup notification while the agent waits for input.
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
}
91
92impl AssistantSettings {
93 pub fn are_live_diffs_enabled(&self, cx: &App) -> bool {
94 if cx.has_flag::<Assistant2FeatureFlag>() {
95 return false;
96 }
97
98 cx.is_staff() || self.enable_experimental_live_diffs
99 }
100
101 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
102 self.inline_assistant_model = Some(LanguageModelSelection { provider, model });
103 }
104
105 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
106 self.commit_message_model = Some(LanguageModelSelection { provider, model });
107 }
108
109 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
110 self.thread_summary_model = Some(LanguageModelSelection { provider, model });
111 }
112}
113
/// Assistant panel settings
///
/// Deserialized `#[serde(untagged)]`: the versioned shape (which carries a
/// `"version"` key) is tried first, then the legacy pre-versioning shape.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    /// Settings carrying an explicit `"version"` field (V1 or V2).
    Versioned(Box<VersionedAssistantSettingsContent>),
    /// The original, unversioned settings shape.
    Legacy(LegacyAssistantSettingsContent),
}
121
// The JSON schema advertised for assistant settings is that of the versioned
// representation; the legacy shape is intentionally not part of the schema.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    // Delegate schema generation to the versioned enum.
    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
135
136impl Default for AssistantSettingsContent {
137 fn default() -> Self {
138 Self::Versioned(Box::new(VersionedAssistantSettingsContent::default()))
139 }
140}
141
142impl AssistantSettingsContent {
143 pub fn is_version_outdated(&self) -> bool {
144 match self {
145 AssistantSettingsContent::Versioned(settings) => match **settings {
146 VersionedAssistantSettingsContent::V1(_) => true,
147 VersionedAssistantSettingsContent::V2(_) => false,
148 },
149 AssistantSettingsContent::Legacy(_) => true,
150 }
151 }
152
153 fn upgrade(&self) -> AssistantSettingsContentV2 {
154 match self {
155 AssistantSettingsContent::Versioned(settings) => match **settings {
156 VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
157 enabled: settings.enabled,
158 button: settings.button,
159 dock: settings.dock,
160 default_width: settings.default_width,
161 default_height: settings.default_width,
162 default_model: settings
163 .provider
164 .clone()
165 .and_then(|provider| match provider {
166 AssistantProviderContentV1::ZedDotDev { default_model } => {
167 default_model.map(|model| LanguageModelSelection {
168 provider: "zed.dev".to_string(),
169 model: model.id().to_string(),
170 })
171 }
172 AssistantProviderContentV1::OpenAi { default_model, .. } => {
173 default_model.map(|model| LanguageModelSelection {
174 provider: "openai".to_string(),
175 model: model.id().to_string(),
176 })
177 }
178 AssistantProviderContentV1::Anthropic { default_model, .. } => {
179 default_model.map(|model| LanguageModelSelection {
180 provider: "anthropic".to_string(),
181 model: model.id().to_string(),
182 })
183 }
184 AssistantProviderContentV1::Ollama { default_model, .. } => {
185 default_model.map(|model| LanguageModelSelection {
186 provider: "ollama".to_string(),
187 model: model.id().to_string(),
188 })
189 }
190 AssistantProviderContentV1::LmStudio { default_model, .. } => {
191 default_model.map(|model| LanguageModelSelection {
192 provider: "lmstudio".to_string(),
193 model: model.id().to_string(),
194 })
195 }
196 AssistantProviderContentV1::DeepSeek { default_model, .. } => {
197 default_model.map(|model| LanguageModelSelection {
198 provider: "deepseek".to_string(),
199 model: model.id().to_string(),
200 })
201 }
202 }),
203 inline_assistant_model: None,
204 commit_message_model: None,
205 thread_summary_model: None,
206 inline_alternatives: None,
207 enable_experimental_live_diffs: None,
208 default_profile: None,
209 profiles: None,
210 always_allow_tool_actions: None,
211 notify_when_agent_waiting: None,
212 },
213 VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
214 },
215 AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
216 enabled: None,
217 button: settings.button,
218 dock: settings.dock,
219 default_width: settings.default_width,
220 default_height: settings.default_height,
221 default_model: Some(LanguageModelSelection {
222 provider: "openai".to_string(),
223 model: settings
224 .default_open_ai_model
225 .clone()
226 .unwrap_or_default()
227 .id()
228 .to_string(),
229 }),
230 inline_assistant_model: None,
231 commit_message_model: None,
232 thread_summary_model: None,
233 inline_alternatives: None,
234 enable_experimental_live_diffs: None,
235 default_profile: None,
236 profiles: None,
237 always_allow_tool_actions: None,
238 notify_when_agent_waiting: None,
239 },
240 }
241 }
242
243 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
244 match self {
245 AssistantSettingsContent::Versioned(settings) => match **settings {
246 VersionedAssistantSettingsContent::V1(ref mut settings) => {
247 settings.dock = Some(dock);
248 }
249 VersionedAssistantSettingsContent::V2(ref mut settings) => {
250 settings.dock = Some(dock);
251 }
252 },
253 AssistantSettingsContent::Legacy(settings) => {
254 settings.dock = Some(dock);
255 }
256 }
257 }
258
259 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
260 let model = language_model.id().0.to_string();
261 let provider = language_model.provider_id().0.to_string();
262
263 match self {
264 AssistantSettingsContent::Versioned(settings) => match **settings {
265 VersionedAssistantSettingsContent::V1(ref mut settings) => {
266 match provider.as_ref() {
267 "zed.dev" => {
268 log::warn!("attempted to set zed.dev model on outdated settings");
269 }
270 "anthropic" => {
271 let api_url = match &settings.provider {
272 Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
273 api_url.clone()
274 }
275 _ => None,
276 };
277 settings.provider = Some(AssistantProviderContentV1::Anthropic {
278 default_model: AnthropicModel::from_id(&model).ok(),
279 api_url,
280 });
281 }
282 "ollama" => {
283 let api_url = match &settings.provider {
284 Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
285 api_url.clone()
286 }
287 _ => None,
288 };
289 settings.provider = Some(AssistantProviderContentV1::Ollama {
290 default_model: Some(ollama::Model::new(&model, None, None)),
291 api_url,
292 });
293 }
294 "lmstudio" => {
295 let api_url = match &settings.provider {
296 Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
297 api_url.clone()
298 }
299 _ => None,
300 };
301 settings.provider = Some(AssistantProviderContentV1::LmStudio {
302 default_model: Some(lmstudio::Model::new(&model, None, None)),
303 api_url,
304 });
305 }
306 "openai" => {
307 let (api_url, available_models) = match &settings.provider {
308 Some(AssistantProviderContentV1::OpenAi {
309 api_url,
310 available_models,
311 ..
312 }) => (api_url.clone(), available_models.clone()),
313 _ => (None, None),
314 };
315 settings.provider = Some(AssistantProviderContentV1::OpenAi {
316 default_model: OpenAiModel::from_id(&model).ok(),
317 api_url,
318 available_models,
319 });
320 }
321 "deepseek" => {
322 let api_url = match &settings.provider {
323 Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
324 api_url.clone()
325 }
326 _ => None,
327 };
328 settings.provider = Some(AssistantProviderContentV1::DeepSeek {
329 default_model: DeepseekModel::from_id(&model).ok(),
330 api_url,
331 });
332 }
333 _ => {}
334 }
335 }
336 VersionedAssistantSettingsContent::V2(ref mut settings) => {
337 settings.default_model = Some(LanguageModelSelection { provider, model });
338 }
339 },
340 AssistantSettingsContent::Legacy(settings) => {
341 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
342 settings.default_open_ai_model = Some(model);
343 }
344 }
345 }
346 }
347
348 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
349 if let AssistantSettingsContent::Versioned(boxed) = self {
350 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
351 settings.inline_assistant_model = Some(LanguageModelSelection { provider, model });
352 }
353 }
354 }
355
356 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
357 if let AssistantSettingsContent::Versioned(boxed) = self {
358 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
359 settings.commit_message_model = Some(LanguageModelSelection { provider, model });
360 }
361 }
362 }
363
364 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
365 if let AssistantSettingsContent::Versioned(boxed) = self {
366 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
367 settings.thread_summary_model = Some(LanguageModelSelection { provider, model });
368 }
369 }
370 }
371
372 pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
373 let AssistantSettingsContent::Versioned(boxed) = self else {
374 return;
375 };
376
377 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
378 settings.always_allow_tool_actions = Some(allow);
379 }
380 }
381
382 pub fn set_profile(&mut self, profile_id: AgentProfileId) {
383 let AssistantSettingsContent::Versioned(boxed) = self else {
384 return;
385 };
386
387 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
388 settings.default_profile = Some(profile_id);
389 }
390 }
391
392 pub fn create_profile(
393 &mut self,
394 profile_id: AgentProfileId,
395 profile: AgentProfile,
396 ) -> Result<()> {
397 let AssistantSettingsContent::Versioned(boxed) = self else {
398 return Ok(());
399 };
400
401 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
402 let profiles = settings.profiles.get_or_insert_default();
403 if profiles.contains_key(&profile_id) {
404 bail!("profile with ID '{profile_id}' already exists");
405 }
406
407 profiles.insert(
408 profile_id,
409 AgentProfileContent {
410 name: profile.name.into(),
411 tools: profile.tools,
412 enable_all_context_servers: Some(profile.enable_all_context_servers),
413 context_servers: profile
414 .context_servers
415 .into_iter()
416 .map(|(server_id, preset)| {
417 (
418 server_id,
419 ContextServerPresetContent {
420 tools: preset.tools,
421 },
422 )
423 })
424 .collect(),
425 },
426 );
427 }
428
429 Ok(())
430 }
431}
432
/// Settings payloads tagged with an explicit `"version"` field.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    /// Schema version 1: per-provider model configuration.
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    /// Schema version 2 (current): provider-agnostic model selections.
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
441
442impl Default for VersionedAssistantSettingsContent {
443 fn default() -> Self {
444 Self::V2(AssistantSettingsContentV2 {
445 enabled: None,
446 button: None,
447 dock: None,
448 default_width: None,
449 default_height: None,
450 default_model: None,
451 inline_assistant_model: None,
452 commit_message_model: None,
453 thread_summary_model: None,
454 inline_alternatives: None,
455 enable_experimental_live_diffs: None,
456 default_profile: None,
457 profiles: None,
458 always_allow_tool_actions: None,
459 notify_when_agent_waiting: None,
460 })
461 }
462}
463
/// Assistant settings, schema version 2 (the current format).
///
/// Every field is optional; unset fields fall back to the defaults applied
/// when settings sources are merged.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
}
516
/// A (provider, model) pair identifying a language model.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    /// Provider id, e.g. "openai" or "zed.dev"; constrained to the known
    /// providers in the generated JSON schema.
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    /// Provider-specific model id.
    pub model: String,
}
523
524fn providers_schema(_: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
525 schemars::schema::SchemaObject {
526 enum_values: Some(vec![
527 "anthropic".into(),
528 "bedrock".into(),
529 "google".into(),
530 "lmstudio".into(),
531 "ollama".into(),
532 "openai".into(),
533 "zed.dev".into(),
534 "copilot_chat".into(),
535 "deepseek".into(),
536 ]),
537 ..Default::default()
538 }
539 .into()
540}
541
542impl Default for LanguageModelSelection {
543 fn default() -> Self {
544 Self {
545 provider: "openai".to_string(),
546 model: "gpt-4".to_string(),
547 }
548 }
549}
550
/// Serialized form of an agent profile as stored in settings.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    /// Human-readable profile name.
    pub name: Arc<str>,
    /// Per-tool enablement, keyed by tool name.
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    /// Per-context-server tool presets, keyed by server id.
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
561
/// Serialized per-context-server tool preset.
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    /// Per-tool enablement for this context server, keyed by tool name.
    pub tools: IndexMap<Arc<str>, bool>,
}
566
/// Assistant settings, schema version 1 (superseded by V2).
///
/// V1 configured a single provider with its own default model; `upgrade`
/// converts this into the provider-agnostic V2 shape.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
595
/// The original, unversioned assistant settings shape (OpenAI only).
///
/// Kept so old settings files still deserialize; `upgrade` converts it to V2.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}
623
impl Settings for AssistantSettings {
    /// Assistant settings live under the `"assistant"` key of the settings file.
    const KEY: Option<&'static str> = Some("assistant");

    /// Keep the `"version"` key when settings are written back to disk.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Merges defaults and user customizations into effective settings.
    ///
    /// Each source is first upgraded to the latest (V2) schema; later sources
    /// override earlier ones field-by-field where they provide a value.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Record that at least one source used an old schema so the UI
            // can surface a migration hint.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            // Optional overrides: keep the previous source's value when the
            // current source doesn't set one.
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.default_profile, value.default_profile);

            // Profiles accumulate across sources; a later source replaces a
            // profile with the same id but leaves the others in place.
            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }
}
712
/// Overwrites `target` with the payload of `value` when one is present;
/// leaves `target` untouched otherwise.
fn merge<T>(target: &mut T, value: Option<T>) {
    match value {
        Some(value) => *target = value,
        None => {}
    }
}
718
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    // End-to-end check: registering the settings yields sane defaults, and a
    // round trip through `update_settings_file` writes the latest ("2")
    // versioned format that deserializes back as not outdated.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // Freshly registered settings use the current schema and defaults.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        // Overwrite the settings file with a V2 payload.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(Box::new(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_assistant_model: None,
                            commit_message_model: None,
                            thread_summary_model: None,
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                            default_profile: None,
                            profiles: None,
                            always_allow_tool_actions: None,
                            notify_when_agent_waiting: None,
                        }),
                    ))
                },
            );
        });

        cx.run_until_parked();

        // The written file carries the preserved "version" key...
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        // ...and deserializes as the current (not outdated) schema.
        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}