settings: Remove version fields (#33372)

Created by Bennet Bo Fenner

This cleans up our settings so that they no longer include any `version` fields, since we
now have an actual settings migrator.

This PR removes `language_models > anthropic > version`,
`language_models > openai > version`, and `agent > version`.

We have had migration paths in the code for a long time, so in practice
almost everyone should already be using the latest version of these settings.
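
As a minimal sketch of what the migrator does to a user's `settings.json` (the surrounding keys and values here are illustrative; only the `version` fields are removed):

```jsonc
// Before: versioned settings
{
  "agent": {
    "version": "2",
    "default_model": { "provider": "zed.dev", "model": "claude-sonnet-4" }
  },
  "language_models": {
    "anthropic": { "version": "1" },
    "openai": { "version": "1", "api_url": "https://api.openai.com/v1" }
  }
}

// After migration: identical, minus the version fields
{
  "agent": {
    "default_model": { "provider": "zed.dev", "model": "claude-sonnet-4" }
  },
  "language_models": {
    "anthropic": {},
    "openai": { "api_url": "https://api.openai.com/v1" }
  }
}
```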


Release Notes:

- Removed the `version` fields from settings for `agent`, `language_models >
anthropic`, and `language_models > openai`. Your settings will be migrated
automatically. If you run into issues with this, please open an issue
[here](https://github.com/zed-industries/zed/issues).

Change summary

Cargo.lock                                              |   7 
crates/agent_settings/Cargo.toml                        |   7 
crates/agent_settings/src/agent_settings.rs             | 747 ----------
crates/agent_ui/src/agent_configuration/tool_picker.rs  |  65 
crates/assistant_tools/src/edit_agent/evals.rs          |   2 
crates/eval/src/eval.rs                                 |   2 
crates/language_models/Cargo.toml                       |   1 
crates/language_models/src/language_models.rs           |   5 
crates/language_models/src/provider/anthropic.rs        |   1 
crates/language_models/src/provider/mistral.rs          |   1 
crates/language_models/src/provider/open_ai.rs          |  46 
crates/language_models/src/provider/vercel.rs           |   1 
crates/language_models/src/settings.rs                  | 188 --
crates/migrator/src/migrations.rs                       |   6 
crates/migrator/src/migrations/m_2025_06_25/settings.rs | 133 +
crates/migrator/src/migrator.rs                         |  79 +
crates/zed/src/main.rs                                  |   7 
crates/zed/src/zed.rs                                   |   7 
18 files changed, 332 insertions(+), 973 deletions(-)

Detailed changes

Cargo.lock 🔗

@@ -110,18 +110,11 @@ dependencies = [
 name = "agent_settings"
 version = "0.1.0"
 dependencies = [
- "anthropic",
  "anyhow",
  "collections",
- "deepseek",
  "fs",
  "gpui",
  "language_model",
- "lmstudio",
- "log",
- "mistral",
- "ollama",
- "open_ai",
  "paths",
  "schemars",
  "serde",

crates/agent_settings/Cargo.toml 🔗

@@ -12,17 +12,10 @@ workspace = true
 path = "src/agent_settings.rs"
 
 [dependencies]
-anthropic = { workspace = true, features = ["schemars"] }
 anyhow.workspace = true
 collections.workspace = true
 gpui.workspace = true
 language_model.workspace = true
-lmstudio = { workspace = true, features = ["schemars"] }
-log.workspace = true
-ollama = { workspace = true, features = ["schemars"] }
-open_ai = { workspace = true, features = ["schemars"] }
-deepseek = { workspace = true, features = ["schemars"] }
-mistral = { workspace = true, features = ["schemars"] }
 schemars.workspace = true
 serde.workspace = true
 settings.workspace = true

crates/agent_settings/src/agent_settings.rs 🔗

@@ -2,16 +2,10 @@ mod agent_profile;
 
 use std::sync::Arc;
 
-use ::open_ai::Model as OpenAiModel;
-use anthropic::Model as AnthropicModel;
 use anyhow::{Result, bail};
 use collections::IndexMap;
-use deepseek::Model as DeepseekModel;
 use gpui::{App, Pixels, SharedString};
 use language_model::LanguageModel;
-use lmstudio::Model as LmStudioModel;
-use mistral::Model as MistralModel;
-use ollama::Model as OllamaModel;
 use schemars::{JsonSchema, schema::Schema};
 use serde::{Deserialize, Serialize};
 use settings::{Settings, SettingsSources};
@@ -48,45 +42,6 @@ pub enum NotifyWhenAgentWaiting {
     Never,
 }
 
-#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
-#[serde(tag = "name", rename_all = "snake_case")]
-#[schemars(deny_unknown_fields)]
-pub enum AgentProviderContentV1 {
-    #[serde(rename = "zed.dev")]
-    ZedDotDev { default_model: Option<String> },
-    #[serde(rename = "openai")]
-    OpenAi {
-        default_model: Option<OpenAiModel>,
-        api_url: Option<String>,
-        available_models: Option<Vec<OpenAiModel>>,
-    },
-    #[serde(rename = "anthropic")]
-    Anthropic {
-        default_model: Option<AnthropicModel>,
-        api_url: Option<String>,
-    },
-    #[serde(rename = "ollama")]
-    Ollama {
-        default_model: Option<OllamaModel>,
-        api_url: Option<String>,
-    },
-    #[serde(rename = "lmstudio")]
-    LmStudio {
-        default_model: Option<LmStudioModel>,
-        api_url: Option<String>,
-    },
-    #[serde(rename = "deepseek")]
-    DeepSeek {
-        default_model: Option<DeepseekModel>,
-        api_url: Option<String>,
-    },
-    #[serde(rename = "mistral")]
-    Mistral {
-        default_model: Option<MistralModel>,
-        api_url: Option<String>,
-    },
-}
-
 #[derive(Default, Clone, Debug)]
 pub struct AgentSettings {
     pub enabled: bool,
@@ -168,366 +123,56 @@ impl LanguageModelParameters {
     }
 }
 
-/// Agent panel settings
-#[derive(Clone, Serialize, Deserialize, Debug, Default)]
-pub struct AgentSettingsContent {
-    #[serde(flatten)]
-    pub inner: Option<AgentSettingsContentInner>,
-}
-
-#[derive(Clone, Serialize, Deserialize, Debug)]
-#[serde(untagged)]
-pub enum AgentSettingsContentInner {
-    Versioned(Box<VersionedAgentSettingsContent>),
-    Legacy(LegacyAgentSettingsContent),
-}
-
-impl AgentSettingsContentInner {
-    fn for_v2(content: AgentSettingsContentV2) -> Self {
-        AgentSettingsContentInner::Versioned(Box::new(VersionedAgentSettingsContent::V2(content)))
-    }
-}
-
-impl JsonSchema for AgentSettingsContent {
-    fn schema_name() -> String {
-        VersionedAgentSettingsContent::schema_name()
-    }
-
-    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
-        VersionedAgentSettingsContent::json_schema(r#gen)
-    }
-
-    fn is_referenceable() -> bool {
-        VersionedAgentSettingsContent::is_referenceable()
-    }
-}
-
 impl AgentSettingsContent {
-    pub fn is_version_outdated(&self) -> bool {
-        match &self.inner {
-            Some(AgentSettingsContentInner::Versioned(settings)) => match **settings {
-                VersionedAgentSettingsContent::V1(_) => true,
-                VersionedAgentSettingsContent::V2(_) => false,
-            },
-            Some(AgentSettingsContentInner::Legacy(_)) => true,
-            None => false,
-        }
-    }
-
-    fn upgrade(&self) -> AgentSettingsContentV2 {
-        match &self.inner {
-            Some(AgentSettingsContentInner::Versioned(settings)) => match **settings {
-                VersionedAgentSettingsContent::V1(ref settings) => AgentSettingsContentV2 {
-                    enabled: settings.enabled,
-                    button: settings.button,
-                    dock: settings.dock,
-                    default_width: settings.default_width,
-                    default_height: settings.default_width,
-                    default_model: settings
-                        .provider
-                        .clone()
-                        .and_then(|provider| match provider {
-                            AgentProviderContentV1::ZedDotDev { default_model } => default_model
-                                .map(|model| LanguageModelSelection {
-                                    provider: "zed.dev".into(),
-                                    model,
-                                }),
-                            AgentProviderContentV1::OpenAi { default_model, .. } => default_model
-                                .map(|model| LanguageModelSelection {
-                                    provider: "openai".into(),
-                                    model: model.id().to_string(),
-                                }),
-                            AgentProviderContentV1::Anthropic { default_model, .. } => {
-                                default_model.map(|model| LanguageModelSelection {
-                                    provider: "anthropic".into(),
-                                    model: model.id().to_string(),
-                                })
-                            }
-                            AgentProviderContentV1::Ollama { default_model, .. } => default_model
-                                .map(|model| LanguageModelSelection {
-                                    provider: "ollama".into(),
-                                    model: model.id().to_string(),
-                                }),
-                            AgentProviderContentV1::LmStudio { default_model, .. } => default_model
-                                .map(|model| LanguageModelSelection {
-                                    provider: "lmstudio".into(),
-                                    model: model.id().to_string(),
-                                }),
-                            AgentProviderContentV1::DeepSeek { default_model, .. } => default_model
-                                .map(|model| LanguageModelSelection {
-                                    provider: "deepseek".into(),
-                                    model: model.id().to_string(),
-                                }),
-                            AgentProviderContentV1::Mistral { default_model, .. } => default_model
-                                .map(|model| LanguageModelSelection {
-                                    provider: "mistral".into(),
-                                    model: model.id().to_string(),
-                                }),
-                        }),
-                    inline_assistant_model: None,
-                    commit_message_model: None,
-                    thread_summary_model: None,
-                    inline_alternatives: None,
-                    default_profile: None,
-                    default_view: None,
-                    profiles: None,
-                    always_allow_tool_actions: None,
-                    notify_when_agent_waiting: None,
-                    stream_edits: None,
-                    single_file_review: None,
-                    model_parameters: Vec::new(),
-                    preferred_completion_mode: None,
-                    enable_feedback: None,
-                    play_sound_when_agent_done: None,
-                },
-                VersionedAgentSettingsContent::V2(ref settings) => settings.clone(),
-            },
-            Some(AgentSettingsContentInner::Legacy(settings)) => AgentSettingsContentV2 {
-                enabled: None,
-                button: settings.button,
-                dock: settings.dock,
-                default_width: settings.default_width,
-                default_height: settings.default_height,
-                default_model: Some(LanguageModelSelection {
-                    provider: "openai".into(),
-                    model: settings
-                        .default_open_ai_model
-                        .clone()
-                        .unwrap_or_default()
-                        .id()
-                        .to_string(),
-                }),
-                inline_assistant_model: None,
-                commit_message_model: None,
-                thread_summary_model: None,
-                inline_alternatives: None,
-                default_profile: None,
-                default_view: None,
-                profiles: None,
-                always_allow_tool_actions: None,
-                notify_when_agent_waiting: None,
-                stream_edits: None,
-                single_file_review: None,
-                model_parameters: Vec::new(),
-                preferred_completion_mode: None,
-                enable_feedback: None,
-                play_sound_when_agent_done: None,
-            },
-            None => AgentSettingsContentV2::default(),
-        }
-    }
-
     pub fn set_dock(&mut self, dock: AgentDockPosition) {
-        match &mut self.inner {
-            Some(AgentSettingsContentInner::Versioned(settings)) => match **settings {
-                VersionedAgentSettingsContent::V1(ref mut settings) => {
-                    settings.dock = Some(dock);
-                }
-                VersionedAgentSettingsContent::V2(ref mut settings) => {
-                    settings.dock = Some(dock);
-                }
-            },
-            Some(AgentSettingsContentInner::Legacy(settings)) => {
-                settings.dock = Some(dock);
-            }
-            None => {
-                self.inner = Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
-                    dock: Some(dock),
-                    ..Default::default()
-                }))
-            }
-        }
+        self.dock = Some(dock);
     }
 
     pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
         let model = language_model.id().0.to_string();
         let provider = language_model.provider_id().0.to_string();
 
-        match &mut self.inner {
-            Some(AgentSettingsContentInner::Versioned(settings)) => match **settings {
-                VersionedAgentSettingsContent::V1(ref mut settings) => match provider.as_ref() {
-                    "zed.dev" => {
-                        log::warn!("attempted to set zed.dev model on outdated settings");
-                    }
-                    "anthropic" => {
-                        let api_url = match &settings.provider {
-                            Some(AgentProviderContentV1::Anthropic { api_url, .. }) => {
-                                api_url.clone()
-                            }
-                            _ => None,
-                        };
-                        settings.provider = Some(AgentProviderContentV1::Anthropic {
-                            default_model: AnthropicModel::from_id(&model).ok(),
-                            api_url,
-                        });
-                    }
-                    "ollama" => {
-                        let api_url = match &settings.provider {
-                            Some(AgentProviderContentV1::Ollama { api_url, .. }) => api_url.clone(),
-                            _ => None,
-                        };
-                        settings.provider = Some(AgentProviderContentV1::Ollama {
-                            default_model: Some(ollama::Model::new(
-                                &model,
-                                None,
-                                None,
-                                Some(language_model.supports_tools()),
-                                Some(language_model.supports_images()),
-                                None,
-                            )),
-                            api_url,
-                        });
-                    }
-                    "lmstudio" => {
-                        let api_url = match &settings.provider {
-                            Some(AgentProviderContentV1::LmStudio { api_url, .. }) => {
-                                api_url.clone()
-                            }
-                            _ => None,
-                        };
-                        settings.provider = Some(AgentProviderContentV1::LmStudio {
-                            default_model: Some(lmstudio::Model::new(
-                                &model, None, None, false, false,
-                            )),
-                            api_url,
-                        });
-                    }
-                    "openai" => {
-                        let (api_url, available_models) = match &settings.provider {
-                            Some(AgentProviderContentV1::OpenAi {
-                                api_url,
-                                available_models,
-                                ..
-                            }) => (api_url.clone(), available_models.clone()),
-                            _ => (None, None),
-                        };
-                        settings.provider = Some(AgentProviderContentV1::OpenAi {
-                            default_model: OpenAiModel::from_id(&model).ok(),
-                            api_url,
-                            available_models,
-                        });
-                    }
-                    "deepseek" => {
-                        let api_url = match &settings.provider {
-                            Some(AgentProviderContentV1::DeepSeek { api_url, .. }) => {
-                                api_url.clone()
-                            }
-                            _ => None,
-                        };
-                        settings.provider = Some(AgentProviderContentV1::DeepSeek {
-                            default_model: DeepseekModel::from_id(&model).ok(),
-                            api_url,
-                        });
-                    }
-                    _ => {}
-                },
-                VersionedAgentSettingsContent::V2(ref mut settings) => {
-                    settings.default_model = Some(LanguageModelSelection {
-                        provider: provider.into(),
-                        model,
-                    });
-                }
-            },
-            Some(AgentSettingsContentInner::Legacy(settings)) => {
-                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
-                    settings.default_open_ai_model = Some(model);
-                }
-            }
-            None => {
-                self.inner = Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
-                    default_model: Some(LanguageModelSelection {
-                        provider: provider.into(),
-                        model,
-                    }),
-                    ..Default::default()
-                }));
-            }
-        }
+        self.default_model = Some(LanguageModelSelection {
+            provider: provider.into(),
+            model,
+        });
     }
 
     pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
-        self.v2_setting(|setting| {
-            setting.inline_assistant_model = Some(LanguageModelSelection {
-                provider: provider.into(),
-                model,
-            });
-            Ok(())
-        })
-        .ok();
+        self.inline_assistant_model = Some(LanguageModelSelection {
+            provider: provider.into(),
+            model,
+        });
     }
 
     pub fn set_commit_message_model(&mut self, provider: String, model: String) {
-        self.v2_setting(|setting| {
-            setting.commit_message_model = Some(LanguageModelSelection {
-                provider: provider.into(),
-                model,
-            });
-            Ok(())
-        })
-        .ok();
-    }
-
-    pub fn v2_setting(
-        &mut self,
-        f: impl FnOnce(&mut AgentSettingsContentV2) -> anyhow::Result<()>,
-    ) -> anyhow::Result<()> {
-        match self.inner.get_or_insert_with(|| {
-            AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
-                ..Default::default()
-            })
-        }) {
-            AgentSettingsContentInner::Versioned(boxed) => {
-                if let VersionedAgentSettingsContent::V2(ref mut settings) = **boxed {
-                    f(settings)
-                } else {
-                    Ok(())
-                }
-            }
-            _ => Ok(()),
-        }
+        self.commit_message_model = Some(LanguageModelSelection {
+            provider: provider.into(),
+            model,
+        });
     }
 
     pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
-        self.v2_setting(|setting| {
-            setting.thread_summary_model = Some(LanguageModelSelection {
-                provider: provider.into(),
-                model,
-            });
-            Ok(())
-        })
-        .ok();
+        self.thread_summary_model = Some(LanguageModelSelection {
+            provider: provider.into(),
+            model,
+        });
     }
 
     pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
-        self.v2_setting(|setting| {
-            setting.always_allow_tool_actions = Some(allow);
-            Ok(())
-        })
-        .ok();
+        self.always_allow_tool_actions = Some(allow);
     }
 
     pub fn set_play_sound_when_agent_done(&mut self, allow: bool) {
-        self.v2_setting(|setting| {
-            setting.play_sound_when_agent_done = Some(allow);
-            Ok(())
-        })
-        .ok();
+        self.play_sound_when_agent_done = Some(allow);
     }
 
     pub fn set_single_file_review(&mut self, allow: bool) {
-        self.v2_setting(|setting| {
-            setting.single_file_review = Some(allow);
-            Ok(())
-        })
-        .ok();
+        self.single_file_review = Some(allow);
     }
 
     pub fn set_profile(&mut self, profile_id: AgentProfileId) {
-        self.v2_setting(|setting| {
-            setting.default_profile = Some(profile_id);
-            Ok(())
-        })
-        .ok();
+        self.default_profile = Some(profile_id);
     }
 
     pub fn create_profile(
@@ -535,79 +180,39 @@ impl AgentSettingsContent {
         profile_id: AgentProfileId,
         profile_settings: AgentProfileSettings,
     ) -> Result<()> {
-        self.v2_setting(|settings| {
-            let profiles = settings.profiles.get_or_insert_default();
-            if profiles.contains_key(&profile_id) {
-                bail!("profile with ID '{profile_id}' already exists");
-            }
-
-            profiles.insert(
-                profile_id,
-                AgentProfileContent {
-                    name: profile_settings.name.into(),
-                    tools: profile_settings.tools,
-                    enable_all_context_servers: Some(profile_settings.enable_all_context_servers),
-                    context_servers: profile_settings
-                        .context_servers
-                        .into_iter()
-                        .map(|(server_id, preset)| {
-                            (
-                                server_id,
-                                ContextServerPresetContent {
-                                    tools: preset.tools,
-                                },
-                            )
-                        })
-                        .collect(),
-                },
-            );
-
-            Ok(())
-        })
-    }
-}
+        let profiles = self.profiles.get_or_insert_default();
+        if profiles.contains_key(&profile_id) {
+            bail!("profile with ID '{profile_id}' already exists");
+        }
 
-#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
-#[serde(tag = "version")]
-#[schemars(deny_unknown_fields)]
-pub enum VersionedAgentSettingsContent {
-    #[serde(rename = "1")]
-    V1(AgentSettingsContentV1),
-    #[serde(rename = "2")]
-    V2(AgentSettingsContentV2),
-}
+        profiles.insert(
+            profile_id,
+            AgentProfileContent {
+                name: profile_settings.name.into(),
+                tools: profile_settings.tools,
+                enable_all_context_servers: Some(profile_settings.enable_all_context_servers),
+                context_servers: profile_settings
+                    .context_servers
+                    .into_iter()
+                    .map(|(server_id, preset)| {
+                        (
+                            server_id,
+                            ContextServerPresetContent {
+                                tools: preset.tools,
+                            },
+                        )
+                    })
+                    .collect(),
+            },
+        );
 
-impl Default for VersionedAgentSettingsContent {
-    fn default() -> Self {
-        Self::V2(AgentSettingsContentV2 {
-            enabled: None,
-            button: None,
-            dock: None,
-            default_width: None,
-            default_height: None,
-            default_model: None,
-            inline_assistant_model: None,
-            commit_message_model: None,
-            thread_summary_model: None,
-            inline_alternatives: None,
-            default_profile: None,
-            default_view: None,
-            profiles: None,
-            always_allow_tool_actions: None,
-            notify_when_agent_waiting: None,
-            stream_edits: None,
-            single_file_review: None,
-            model_parameters: Vec::new(),
-            preferred_completion_mode: None,
-            enable_feedback: None,
-            play_sound_when_agent_done: None,
-        })
+        Ok(())
     }
 }
 
 #[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
 #[schemars(deny_unknown_fields)]
-pub struct AgentSettingsContentV2 {
+pub struct AgentSettingsContent {
     /// Whether the Agent is enabled.
     ///
     /// Default: true
@@ -779,65 +384,6 @@ pub struct ContextServerPresetContent {
     pub tools: IndexMap<Arc<str>, bool>,
 }
 
-#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
-#[schemars(deny_unknown_fields)]
-pub struct AgentSettingsContentV1 {
-    /// Whether the Agent is enabled.
-    ///
-    /// Default: true
-    enabled: Option<bool>,
-    /// Whether to show the Agent panel button in the status bar.
-    ///
-    /// Default: true
-    button: Option<bool>,
-    /// Where to dock the Agent.
-    ///
-    /// Default: right
-    dock: Option<AgentDockPosition>,
-    /// Default width in pixels when the Agent is docked to the left or right.
-    ///
-    /// Default: 640
-    default_width: Option<f32>,
-    /// Default height in pixels when the Agent is docked to the bottom.
-    ///
-    /// Default: 320
-    default_height: Option<f32>,
-    /// The provider of the Agent service.
-    ///
-    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev"
-    /// each with their respective default models and configurations.
-    provider: Option<AgentProviderContentV1>,
-}
-
-#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
-#[schemars(deny_unknown_fields)]
-pub struct LegacyAgentSettingsContent {
-    /// Whether to show the Agent panel button in the status bar.
-    ///
-    /// Default: true
-    pub button: Option<bool>,
-    /// Where to dock the Agent.
-    ///
-    /// Default: right
-    pub dock: Option<AgentDockPosition>,
-    /// Default width in pixels when the Agent is docked to the left or right.
-    ///
-    /// Default: 640
-    pub default_width: Option<f32>,
-    /// Default height in pixels when the Agent is docked to the bottom.
-    ///
-    /// Default: 320
-    pub default_height: Option<f32>,
-    /// The default OpenAI model to use when creating new chats.
-    ///
-    /// Default: gpt-4-1106-preview
-    pub default_open_ai_model: Option<OpenAiModel>,
-    /// OpenAI API base URL to use when creating new chats.
-    ///
-    /// Default: <https://api.openai.com/v1>
-    pub openai_api_url: Option<String>,
-}
-
 impl Settings for AgentSettings {
     const KEY: Option<&'static str> = Some("agent");
 
@@ -854,11 +400,6 @@ impl Settings for AgentSettings {
         let mut settings = AgentSettings::default();
 
         for value in sources.defaults_and_customizations() {
-            if value.is_version_outdated() {
-                settings.using_outdated_settings_version = true;
-            }
-
-            let value = value.upgrade();
             merge(&mut settings.enabled, value.enabled);
             merge(&mut settings.button, value.button);
             merge(&mut settings.dock, value.dock);
@@ -870,17 +411,23 @@ impl Settings for AgentSettings {
                 &mut settings.default_height,
                 value.default_height.map(Into::into),
             );
-            merge(&mut settings.default_model, value.default_model);
+            merge(&mut settings.default_model, value.default_model.clone());
             settings.inline_assistant_model = value
                 .inline_assistant_model
+                .clone()
                 .or(settings.inline_assistant_model.take());
             settings.commit_message_model = value
+                .clone()
                 .commit_message_model
                 .or(settings.commit_message_model.take());
             settings.thread_summary_model = value
+                .clone()
                 .thread_summary_model
                 .or(settings.thread_summary_model.take());
-            merge(&mut settings.inline_alternatives, value.inline_alternatives);
+            merge(
+                &mut settings.inline_alternatives,
+                value.inline_alternatives.clone(),
+            );
             merge(
                 &mut settings.always_allow_tool_actions,
                 value.always_allow_tool_actions,
@@ -895,7 +442,7 @@ impl Settings for AgentSettings {
             );
             merge(&mut settings.stream_edits, value.stream_edits);
             merge(&mut settings.single_file_review, value.single_file_review);
-            merge(&mut settings.default_profile, value.default_profile);
+            merge(&mut settings.default_profile, value.default_profile.clone());
             merge(&mut settings.default_view, value.default_view);
             merge(
                 &mut settings.preferred_completion_mode,
@@ -907,24 +454,24 @@ impl Settings for AgentSettings {
                 .model_parameters
                 .extend_from_slice(&value.model_parameters);
 
-            if let Some(profiles) = value.profiles {
+            if let Some(profiles) = value.profiles.as_ref() {
                 settings
                     .profiles
                     .extend(profiles.into_iter().map(|(id, profile)| {
                         (
-                            id,
+                            id.clone(),
                             AgentProfileSettings {
-                                name: profile.name.into(),
-                                tools: profile.tools,
+                                name: profile.name.clone().into(),
+                                tools: profile.tools.clone(),
                                 enable_all_context_servers: profile
                                     .enable_all_context_servers
                                     .unwrap_or_default(),
                                 context_servers: profile
                                     .context_servers
-                                    .into_iter()
+                                    .iter()
                                     .map(|(context_server_id, preset)| {
                                         (
-                                            context_server_id,
+                                            context_server_id.clone(),
                                             ContextServerPreset {
                                                 tools: preset.tools.clone(),
                                             },
@@ -945,28 +492,8 @@ impl Settings for AgentSettings {
             .read_value("chat.agent.enabled")
             .and_then(|b| b.as_bool())
         {
-            match &mut current.inner {
-                Some(AgentSettingsContentInner::Versioned(versioned)) => match versioned.as_mut() {
-                    VersionedAgentSettingsContent::V1(setting) => {
-                        setting.enabled = Some(b);
-                        setting.button = Some(b);
-                    }
-
-                    VersionedAgentSettingsContent::V2(setting) => {
-                        setting.enabled = Some(b);
-                        setting.button = Some(b);
-                    }
-                },
-                Some(AgentSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
-                None => {
-                    current.inner =
-                        Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
-                            enabled: Some(b),
-                            button: Some(b),
-                            ..Default::default()
-                        }));
-                }
-            }
+            current.enabled = Some(b);
+            current.button = Some(b);
         }
     }
 }
@@ -976,149 +503,3 @@ fn merge<T>(target: &mut T, value: Option<T>) {
         *target = value;
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use fs::Fs;
-    use gpui::{ReadGlobal, TestAppContext};
-    use settings::SettingsStore;
-
-    use super::*;
-
-    #[gpui::test]
-    async fn test_deserialize_agent_settings_with_version(cx: &mut TestAppContext) {
-        let fs = fs::FakeFs::new(cx.executor().clone());
-        fs.create_dir(paths::settings_file().parent().unwrap())
-            .await
-            .unwrap();
-
-        cx.update(|cx| {
-            let test_settings = settings::SettingsStore::test(cx);
-            cx.set_global(test_settings);
-            AgentSettings::register(cx);
-        });
-
-        cx.update(|cx| {
-            assert!(!AgentSettings::get_global(cx).using_outdated_settings_version);
-            assert_eq!(
-                AgentSettings::get_global(cx).default_model,
-                LanguageModelSelection {
-                    provider: "zed.dev".into(),
-                    model: "claude-sonnet-4".into(),
-                }
-            );
-        });
-
-        cx.update(|cx| {
-            settings::SettingsStore::global(cx).update_settings_file::<AgentSettings>(
-                fs.clone(),
-                |settings, _| {
-                    *settings = AgentSettingsContent {
-                        inner: Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
-                            default_model: Some(LanguageModelSelection {
-                                provider: "test-provider".into(),
-                                model: "gpt-99".into(),
-                            }),
-                            inline_assistant_model: None,
-                            commit_message_model: None,
-                            thread_summary_model: None,
-                            inline_alternatives: None,
-                            enabled: None,
-                            button: None,
-                            dock: None,
-                            default_width: None,
-                            default_height: None,
-                            default_profile: None,
-                            default_view: None,
-                            profiles: None,
-                            always_allow_tool_actions: None,
-                            play_sound_when_agent_done: None,
-                            notify_when_agent_waiting: None,
-                            stream_edits: None,
-                            single_file_review: None,
-                            enable_feedback: None,
-                            model_parameters: Vec::new(),
-                            preferred_completion_mode: None,
-                        })),
-                    }
-                },
-            );
-        });
-
-        cx.run_until_parked();
-
-        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
-        assert!(raw_settings_value.contains(r#""version": "2""#));
-
-        #[derive(Debug, Deserialize)]
-        struct AgentSettingsTest {
-            agent: AgentSettingsContent,
-        }
-
-        let agent_settings: AgentSettingsTest =
-            serde_json_lenient::from_str(&raw_settings_value).unwrap();
-
-        assert!(!agent_settings.agent.is_version_outdated());
-    }
-
-    #[gpui::test]
-    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
-        let fs = fs::FakeFs::new(cx.executor().clone());
-        fs.create_dir(paths::settings_file().parent().unwrap())
-            .await
-            .unwrap();
-
-        cx.update(|cx| {
-            let mut test_settings = settings::SettingsStore::test(cx);
-            let user_settings_content = r#"{
-            "assistant": {
-                "enabled": true,
-                "version": "2",
-                "default_model": {
-                  "provider": "zed.dev",
-                  "model": "gpt-99"
-                },
-            }}"#;
-            test_settings
-                .set_user_settings(user_settings_content, cx)
-                .unwrap();
-            cx.set_global(test_settings);
-            AgentSettings::register(cx);
-        });
-
-        cx.run_until_parked();
-
-        let agent_settings = cx.update(|cx| AgentSettings::get_global(cx).clone());
-        assert!(agent_settings.enabled);
-        assert!(!agent_settings.using_outdated_settings_version);
-        assert_eq!(agent_settings.default_model.model, "gpt-99");
-
-        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
-            settings_store.update_user_settings::<AgentSettings>(cx, |settings| {
-                *settings = AgentSettingsContent {
-                    inner: Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
-                        enabled: Some(false),
-                        default_model: Some(LanguageModelSelection {
-                            provider: "xai".to_owned().into(),
-                            model: "grok".to_owned(),
-                        }),
-                        ..Default::default()
-                    })),
-                };
-            });
-        });
-
-        cx.run_until_parked();
-
-        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());
-
-        #[derive(Debug, Deserialize)]
-        struct AgentSettingsTest {
-            assistant: AgentSettingsContent,
-            agent: Option<serde_json_lenient::Value>,
-        }
-
-        let agent_settings: AgentSettingsTest = serde_json::from_value(settings).unwrap();
-        assert!(agent_settings.agent.is_none());
-    }
-}

crates/agent_ui/src/agent_configuration/tool_picker.rs 🔗

@@ -272,42 +272,35 @@ impl PickerDelegate for ToolPickerDelegate {
             let server_id = server_id.clone();
             let tool_name = tool_name.clone();
             move |settings: &mut AgentSettingsContent, _cx| {
-                settings
-                    .v2_setting(|v2_settings| {
-                        let profiles = v2_settings.profiles.get_or_insert_default();
-                        let profile =
-                            profiles
-                                .entry(profile_id)
-                                .or_insert_with(|| AgentProfileContent {
-                                    name: default_profile.name.into(),
-                                    tools: default_profile.tools,
-                                    enable_all_context_servers: Some(
-                                        default_profile.enable_all_context_servers,
-                                    ),
-                                    context_servers: default_profile
-                                        .context_servers
-                                        .into_iter()
-                                        .map(|(server_id, preset)| {
-                                            (
-                                                server_id,
-                                                ContextServerPresetContent {
-                                                    tools: preset.tools,
-                                                },
-                                            )
-                                        })
-                                        .collect(),
-                                });
-
-                        if let Some(server_id) = server_id {
-                            let preset = profile.context_servers.entry(server_id).or_default();
-                            *preset.tools.entry(tool_name).or_default() = !is_currently_enabled;
-                        } else {
-                            *profile.tools.entry(tool_name).or_default() = !is_currently_enabled;
-                        }
-
-                        Ok(())
-                    })
-                    .ok();
+                let profiles = settings.profiles.get_or_insert_default();
+                let profile = profiles
+                    .entry(profile_id)
+                    .or_insert_with(|| AgentProfileContent {
+                        name: default_profile.name.into(),
+                        tools: default_profile.tools,
+                        enable_all_context_servers: Some(
+                            default_profile.enable_all_context_servers,
+                        ),
+                        context_servers: default_profile
+                            .context_servers
+                            .into_iter()
+                            .map(|(server_id, preset)| {
+                                (
+                                    server_id,
+                                    ContextServerPresetContent {
+                                        tools: preset.tools,
+                                    },
+                                )
+                            })
+                            .collect(),
+                    });
+
+                if let Some(server_id) = server_id {
+                    let preset = profile.context_servers.entry(server_id).or_default();
+                    *preset.tools.entry(tool_name).or_default() = !is_currently_enabled;
+                } else {
+                    *profile.tools.entry(tool_name).or_default() = !is_currently_enabled;
+                }
             }
         });
     }

crates/assistant_tools/src/edit_agent/evals.rs 🔗

@@ -1470,7 +1470,7 @@ impl EditAgentTest {
             Project::init_settings(cx);
             language::init(cx);
             language_model::init(client.clone(), cx);
-            language_models::init(user_store.clone(), client.clone(), fs.clone(), cx);
+            language_models::init(user_store.clone(), client.clone(), cx);
             crate::init(client.http_client(), cx);
         });
 

crates/eval/src/eval.rs 🔗

@@ -417,7 +417,7 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {
     debug_adapter_extension::init(extension_host_proxy.clone(), cx);
     language_extension::init(extension_host_proxy.clone(), languages.clone());
     language_model::init(client.clone(), cx);
-    language_models::init(user_store.clone(), client.clone(), fs.clone(), cx);
+    language_models::init(user_store.clone(), client.clone(), cx);
     languages::init(languages.clone(), node_runtime.clone(), cx);
     prompt_store::init(cx);
     terminal_view::init(cx);

crates/language_models/Cargo.toml 🔗

@@ -42,7 +42,6 @@ open_ai = { workspace = true, features = ["schemars"] }
 open_router = { workspace = true, features = ["schemars"] }
 vercel = { workspace = true, features = ["schemars"] }
 partial-json-fixer.workspace = true
-project.workspace = true
 proto.workspace = true
 release_channel.workspace = true
 schemars.workspace = true

crates/language_models/src/language_models.rs 🔗

@@ -1,7 +1,6 @@
 use std::sync::Arc;
 
 use client::{Client, UserStore};
-use fs::Fs;
 use gpui::{App, Context, Entity};
 use language_model::LanguageModelRegistry;
 use provider::deepseek::DeepSeekLanguageModelProvider;
@@ -23,8 +22,8 @@ use crate::provider::open_router::OpenRouterLanguageModelProvider;
 use crate::provider::vercel::VercelLanguageModelProvider;
 pub use crate::settings::*;
 
-pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, fs: Arc<dyn Fs>, cx: &mut App) {
-    crate::settings::init(fs, cx);
+pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
+    crate::settings::init(cx);
     let registry = LanguageModelRegistry::global(cx);
     registry.update(cx, |registry, cx| {
         register_language_model_providers(registry, user_store, client, cx);

crates/language_models/src/provider/anthropic.rs 🔗

@@ -41,7 +41,6 @@ pub struct AnthropicSettings {
     pub api_url: String,
     /// Extend Zed's list of Anthropic models.
     pub available_models: Vec<AvailableModel>,
-    pub needs_setting_migration: bool,
 }
 
 #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]

crates/language_models/src/provider/mistral.rs 🔗

@@ -36,7 +36,6 @@ const PROVIDER_NAME: &str = "Mistral";
 pub struct MistralSettings {
     pub api_url: String,
     pub available_models: Vec<AvailableModel>,
-    pub needs_setting_migration: bool,
 }
 
 #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]

crates/language_models/src/provider/open_ai.rs 🔗

@@ -28,6 +28,7 @@ use ui::{ElevationIndex, List, Tooltip, prelude::*};
 use ui_input::SingleLineInput;
 use util::ResultExt;
 
+use crate::OpenAiSettingsContent;
 use crate::{AllLanguageModelSettings, ui::InstructionListItem};
 
 const PROVIDER_ID: &str = "openai";
@@ -37,7 +38,6 @@ const PROVIDER_NAME: &str = "OpenAI";
 pub struct OpenAiSettings {
     pub api_url: String,
     pub available_models: Vec<AvailableModel>,
-    pub needs_setting_migration: bool,
 }
 
 #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
@@ -803,30 +803,13 @@ impl ConfigurationView {
         if !api_url.is_empty() && api_url != effective_current_url {
             let fs = <dyn Fs>::global(cx);
             update_settings_file::<AllLanguageModelSettings>(fs, cx, move |settings, _| {
-                use crate::settings::{OpenAiSettingsContent, VersionedOpenAiSettingsContent};
-
-                if settings.openai.is_none() {
-                    settings.openai = Some(OpenAiSettingsContent::Versioned(
-                        VersionedOpenAiSettingsContent::V1(
-                            crate::settings::OpenAiSettingsContentV1 {
-                                api_url: Some(api_url.clone()),
-                                available_models: None,
-                            },
-                        ),
-                    ));
+                if let Some(settings) = settings.openai.as_mut() {
+                    settings.api_url = Some(api_url.clone());
                 } else {
-                    if let Some(openai) = settings.openai.as_mut() {
-                        match openai {
-                            OpenAiSettingsContent::Versioned(versioned) => match versioned {
-                                VersionedOpenAiSettingsContent::V1(v1) => {
-                                    v1.api_url = Some(api_url.clone());
-                                }
-                            },
-                            OpenAiSettingsContent::Legacy(legacy) => {
-                                legacy.api_url = Some(api_url.clone());
-                            }
-                        }
-                    }
+                    settings.openai = Some(OpenAiSettingsContent {
+                        api_url: Some(api_url.clone()),
+                        available_models: None,
+                    });
                 }
             });
         }
@@ -840,19 +823,8 @@ impl ConfigurationView {
         });
         let fs = <dyn Fs>::global(cx);
         update_settings_file::<AllLanguageModelSettings>(fs, cx, |settings, _cx| {
-            use crate::settings::{OpenAiSettingsContent, VersionedOpenAiSettingsContent};
-
-            if let Some(openai) = settings.openai.as_mut() {
-                match openai {
-                    OpenAiSettingsContent::Versioned(versioned) => match versioned {
-                        VersionedOpenAiSettingsContent::V1(v1) => {
-                            v1.api_url = None;
-                        }
-                    },
-                    OpenAiSettingsContent::Legacy(legacy) => {
-                        legacy.api_url = None;
-                    }
-                }
+            if let Some(settings) = settings.openai.as_mut() {
+                settings.api_url = None;
             }
         });
         cx.notify();

crates/language_models/src/provider/vercel.rs 🔗

@@ -32,7 +32,6 @@ const PROVIDER_NAME: &str = "Vercel";
 pub struct VercelSettings {
     pub api_url: String,
     pub available_models: Vec<AvailableModel>,
-    pub needs_setting_migration: bool,
 }
 
 #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]

crates/language_models/src/settings.rs 🔗

@@ -1,12 +1,8 @@
-use std::sync::Arc;
-
 use anyhow::Result;
 use gpui::App;
-use language_model::LanguageModelCacheConfiguration;
-use project::Fs;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
-use settings::{Settings, SettingsSources, update_settings_file};
+use settings::{Settings, SettingsSources};
 
 use crate::provider::{
     self,
@@ -24,36 +20,8 @@ use crate::provider::{
 };
 
 /// Initializes the language model settings.
-pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
+pub fn init(cx: &mut App) {
     AllLanguageModelSettings::register(cx);
-
-    if AllLanguageModelSettings::get_global(cx)
-        .openai
-        .needs_setting_migration
-    {
-        update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
-            if let Some(settings) = setting.openai.clone() {
-                let (newest_version, _) = settings.upgrade();
-                setting.openai = Some(OpenAiSettingsContent::Versioned(
-                    VersionedOpenAiSettingsContent::V1(newest_version),
-                ));
-            }
-        });
-    }
-
-    if AllLanguageModelSettings::get_global(cx)
-        .anthropic
-        .needs_setting_migration
-    {
-        update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
-            if let Some(settings) = setting.anthropic.clone() {
-                let (newest_version, _) = settings.upgrade();
-                setting.anthropic = Some(AnthropicSettingsContent::Versioned(
-                    VersionedAnthropicSettingsContent::V1(newest_version),
-                ));
-            }
-        });
-    }
 }
 
 #[derive(Default)]
@@ -90,78 +58,7 @@ pub struct AllLanguageModelSettingsContent {
 }
 
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
-#[serde(untagged)]
-pub enum AnthropicSettingsContent {
-    Versioned(VersionedAnthropicSettingsContent),
-    Legacy(LegacyAnthropicSettingsContent),
-}
-
-impl AnthropicSettingsContent {
-    pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
-        match self {
-            AnthropicSettingsContent::Legacy(content) => (
-                AnthropicSettingsContentV1 {
-                    api_url: content.api_url,
-                    available_models: content.available_models.map(|models| {
-                        models
-                            .into_iter()
-                            .filter_map(|model| match model {
-                                anthropic::Model::Custom {
-                                    name,
-                                    display_name,
-                                    max_tokens,
-                                    tool_override,
-                                    cache_configuration,
-                                    max_output_tokens,
-                                    default_temperature,
-                                    extra_beta_headers,
-                                    mode,
-                                } => Some(provider::anthropic::AvailableModel {
-                                    name,
-                                    display_name,
-                                    max_tokens,
-                                    tool_override,
-                                    cache_configuration: cache_configuration.as_ref().map(
-                                        |config| LanguageModelCacheConfiguration {
-                                            max_cache_anchors: config.max_cache_anchors,
-                                            should_speculate: config.should_speculate,
-                                            min_total_token: config.min_total_token,
-                                        },
-                                    ),
-                                    max_output_tokens,
-                                    default_temperature,
-                                    extra_beta_headers,
-                                    mode: Some(mode.into()),
-                                }),
-                                _ => None,
-                            })
-                            .collect()
-                    }),
-                },
-                true,
-            ),
-            AnthropicSettingsContent::Versioned(content) => match content {
-                VersionedAnthropicSettingsContent::V1(content) => (content, false),
-            },
-        }
-    }
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
-pub struct LegacyAnthropicSettingsContent {
-    pub api_url: Option<String>,
-    pub available_models: Option<Vec<anthropic::Model>>,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
-#[serde(tag = "version")]
-pub enum VersionedAnthropicSettingsContent {
-    #[serde(rename = "1")]
-    V1(AnthropicSettingsContentV1),
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
-pub struct AnthropicSettingsContentV1 {
+pub struct AnthropicSettingsContent {
     pub api_url: Option<String>,
     pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
 }
@@ -200,64 +97,7 @@ pub struct MistralSettingsContent {
 }
 
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
-#[serde(untagged)]
-pub enum OpenAiSettingsContent {
-    Versioned(VersionedOpenAiSettingsContent),
-    Legacy(LegacyOpenAiSettingsContent),
-}
-
-impl OpenAiSettingsContent {
-    pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
-        match self {
-            OpenAiSettingsContent::Legacy(content) => (
-                OpenAiSettingsContentV1 {
-                    api_url: content.api_url,
-                    available_models: content.available_models.map(|models| {
-                        models
-                            .into_iter()
-                            .filter_map(|model| match model {
-                                open_ai::Model::Custom {
-                                    name,
-                                    display_name,
-                                    max_tokens,
-                                    max_output_tokens,
-                                    max_completion_tokens,
-                                } => Some(provider::open_ai::AvailableModel {
-                                    name,
-                                    max_tokens,
-                                    max_output_tokens,
-                                    display_name,
-                                    max_completion_tokens,
-                                }),
-                                _ => None,
-                            })
-                            .collect()
-                    }),
-                },
-                true,
-            ),
-            OpenAiSettingsContent::Versioned(content) => match content {
-                VersionedOpenAiSettingsContent::V1(content) => (content, false),
-            },
-        }
-    }
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
-pub struct LegacyOpenAiSettingsContent {
-    pub api_url: Option<String>,
-    pub available_models: Option<Vec<open_ai::Model>>,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
-#[serde(tag = "version")]
-pub enum VersionedOpenAiSettingsContent {
-    #[serde(rename = "1")]
-    V1(OpenAiSettingsContentV1),
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
-pub struct OpenAiSettingsContentV1 {
+pub struct OpenAiSettingsContent {
     pub api_url: Option<String>,
     pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
 }
@@ -303,15 +143,7 @@ impl settings::Settings for AllLanguageModelSettings {
 
         for value in sources.defaults_and_customizations() {
             // Anthropic
-            let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
-                Some((content, upgraded)) => (Some(content), upgraded),
-                None => (None, false),
-            };
-
-            if upgraded {
-                settings.anthropic.needs_setting_migration = true;
-            }
-
+            let anthropic = value.anthropic.clone();
             merge(
                 &mut settings.anthropic.api_url,
                 anthropic.as_ref().and_then(|s| s.api_url.clone()),
@@ -377,15 +209,7 @@ impl settings::Settings for AllLanguageModelSettings {
             );
 
             // OpenAI
-            let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
-                Some((content, upgraded)) => (Some(content), upgraded),
-                None => (None, false),
-            };
-
-            if upgraded {
-                settings.openai.needs_setting_migration = true;
-            }
-
+            let openai = value.openai.clone();
             merge(
                 &mut settings.openai.api_url,
                 openai.as_ref().and_then(|s| s.api_url.clone()),

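With the versioned wrappers gone, the `language_models > openai` (and `anthropic`) blocks are plain objects again. A minimal sketch of the shape the simplified `OpenAiSettingsContent` accepts, with no `version` key; the model name and token limit below are illustrative, and the field names follow the `provider::open_ai::AvailableModel` fields referenced in the diff above (optional fields omitted):

```json
{
    "language_models": {
        "openai": {
            "api_url": "https://api.openai.com/v1",
            "available_models": [
                {
                    "name": "gpt-4o-mini",
                    "display_name": "GPT-4o mini",
                    "max_tokens": 128000
                }
            ]
        }
    }
}
```
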
crates/migrator/src/migrations.rs 🔗

@@ -81,3 +81,9 @@ pub(crate) mod m_2025_06_16 {
 
     pub(crate) use settings::SETTINGS_PATTERNS;
 }
+
+pub(crate) mod m_2025_06_25 {
+    mod settings;
+
+    pub(crate) use settings::SETTINGS_PATTERNS;
+}

crates/migrator/src/migrations/m_2025_06_25/settings.rs 🔗

@@ -0,0 +1,133 @@
+use std::ops::Range;
+use tree_sitter::{Query, QueryMatch};
+
+use crate::MigrationPatterns;
+
+pub const SETTINGS_PATTERNS: MigrationPatterns = &[
+    (SETTINGS_VERSION_PATTERN, remove_version_fields),
+    (
+        SETTINGS_NESTED_VERSION_PATTERN,
+        remove_nested_version_fields,
+    ),
+];
+
+const SETTINGS_VERSION_PATTERN: &str = r#"(document
+    (object
+        (pair
+            key: (string (string_content) @key)
+            value: (object
+                (pair
+                    key: (string (string_content) @version_key)
+                    value: (_) @version_value
+                ) @version_pair
+            )
+        )
+    )
+    (#eq? @key "agent")
+    (#eq? @version_key "version")
+)"#;
+
+const SETTINGS_NESTED_VERSION_PATTERN: &str = r#"(document
+    (object
+        (pair
+            key: (string (string_content) @language_models)
+            value: (object
+                (pair
+                    key: (string (string_content) @provider)
+                    value: (object
+                        (pair
+                            key: (string (string_content) @version_key)
+                            value: (_) @version_value
+                        ) @version_pair
+                    )
+                )
+            )
+        )
+    )
+    (#eq? @language_models "language_models")
+    (#match? @provider "^(anthropic|openai)$")
+    (#eq? @version_key "version")
+)"#;
+
+fn remove_version_fields(
+    contents: &str,
+    mat: &QueryMatch,
+    query: &Query,
+) -> Option<(Range<usize>, String)> {
+    let version_pair_ix = query.capture_index_for_name("version_pair")?;
+    let version_pair_node = mat.nodes_for_capture_index(version_pair_ix).next()?;
+
+    remove_pair_with_whitespace(contents, version_pair_node)
+}
+
+fn remove_nested_version_fields(
+    contents: &str,
+    mat: &QueryMatch,
+    query: &Query,
+) -> Option<(Range<usize>, String)> {
+    let version_pair_ix = query.capture_index_for_name("version_pair")?;
+    let version_pair_node = mat.nodes_for_capture_index(version_pair_ix).next()?;
+
+    remove_pair_with_whitespace(contents, version_pair_node)
+}
+
+fn remove_pair_with_whitespace(
+    contents: &str,
+    pair_node: tree_sitter::Node,
+) -> Option<(Range<usize>, String)> {
+    let mut range_to_remove = pair_node.byte_range();
+
+    // Check if there's a comma after this pair
+    if let Some(next_sibling) = pair_node.next_sibling() {
+        if next_sibling.kind() == "," {
+            range_to_remove.end = next_sibling.end_byte();
+        }
+    } else {
+        // If no next sibling, check if there's a comma before
+        if let Some(prev_sibling) = pair_node.prev_sibling() {
+            if prev_sibling.kind() == "," {
+                range_to_remove.start = prev_sibling.start_byte();
+            }
+        }
+    }
+
+    // Include any leading whitespace/newline, including comments
+    let text_before = &contents[..range_to_remove.start];
+    if let Some(last_newline) = text_before.rfind('\n') {
+        let whitespace_start = last_newline + 1;
+        let potential_whitespace = &contents[whitespace_start..range_to_remove.start];
+
+        // Check if it's only whitespace or comments
+        let mut is_whitespace_or_comment = true;
+        let mut in_comment = false;
+        let mut chars = potential_whitespace.chars().peekable();
+
+        while let Some(ch) = chars.next() {
+            if in_comment {
+                if ch == '\n' {
+                    in_comment = false;
+                }
+            } else if ch == '/' && chars.peek() == Some(&'/') {
+                in_comment = true;
+                chars.next(); // Skip the second '/'
+            } else if !ch.is_whitespace() {
+                is_whitespace_or_comment = false;
+                break;
+            }
+        }
+
+        if is_whitespace_or_comment {
+            range_to_remove.start = whitespace_start;
+        }
+    }
+
+    // Also check if we need to include trailing whitespace up to the next line
+    let text_after = &contents[range_to_remove.end..];
+    if let Some(newline_pos) = text_after.find('\n') {
+        if text_after[..newline_pos].chars().all(|c| c.is_whitespace()) {
+            range_to_remove.end += newline_pos + 1;
+        }
+    }
+
+    Some((range_to_remove, String::new()))
+}

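`remove_pair_with_whitespace` also covers the case where `version` is the last pair in its object: there is no trailing comma to consume, so the removal range is extended backwards over the preceding comma instead. A minimal sketch of that case (hypothetical input, not one of the PR's test fixtures; the output assumes the migrator splices the returned empty string over the computed range and that no earlier migration rewrites these keys):

```jsonc
// hypothetical input with "version" as the last key
{ "agent": { "enabled": true, "version": "2" } }

// expected result: the pair and the comma before it are removed
{ "agent": { "enabled": true } }
```
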
crates/migrator/src/migrator.rs 🔗

@@ -152,6 +152,10 @@ pub fn migrate_settings(text: &str) -> Result<Option<String>> {
             migrations::m_2025_06_16::SETTINGS_PATTERNS,
             &SETTINGS_QUERY_2025_06_16,
         ),
+        (
+            migrations::m_2025_06_25::SETTINGS_PATTERNS,
+            &SETTINGS_QUERY_2025_06_25,
+        ),
     ];
     run_migrations(text, migrations)
 }
@@ -254,6 +258,10 @@ define_query!(
     SETTINGS_QUERY_2025_06_16,
     migrations::m_2025_06_16::SETTINGS_PATTERNS
 );
+define_query!(
+    SETTINGS_QUERY_2025_06_25,
+    migrations::m_2025_06_25::SETTINGS_PATTERNS
+);
 
 // custom query
 static EDIT_PREDICTION_SETTINGS_MIGRATION_QUERY: LazyLock<Query> = LazyLock::new(|| {
@@ -1052,4 +1060,75 @@ mod tests {
 }"#;
         assert_migrate_settings(settings, None);
     }
+
+    #[test]
+    fn test_remove_version_fields() {
+        assert_migrate_settings(
+            r#"{
+    "language_models": {
+        "anthropic": {
+            "version": "1",
+            "api_url": "https://api.anthropic.com"
+        },
+        "openai": {
+            "version": "1",
+            "api_url": "https://api.openai.com/v1"
+        }
+    },
+    "agent": {
+        "version": "2",
+        "enabled": true,
+        "preferred_completion_mode": "normal",
+        "button": true,
+        "dock": "right",
+        "default_width": 640,
+        "default_height": 320,
+        "default_model": {
+            "provider": "zed.dev",
+            "model": "claude-sonnet-4"
+        }
+    }
+}"#,
+            Some(
+                r#"{
+    "language_models": {
+        "anthropic": {
+            "api_url": "https://api.anthropic.com"
+        },
+        "openai": {
+            "api_url": "https://api.openai.com/v1"
+        }
+    },
+    "agent": {
+        "enabled": true,
+        "preferred_completion_mode": "normal",
+        "button": true,
+        "dock": "right",
+        "default_width": 640,
+        "default_height": 320,
+        "default_model": {
+            "provider": "zed.dev",
+            "model": "claude-sonnet-4"
+        }
+    }
+}"#,
+            ),
+        );
+
+        // Test that version fields in other contexts are not removed
+        assert_migrate_settings(
+            r#"{
+    "language_models": {
+        "other_provider": {
+            "version": "1",
+            "api_url": "https://api.example.com"
+        }
+    },
+    "other_section": {
+        "version": "1"
+    }
+}"#,
+            None,
+        );
+    }
 }

crates/zed/src/main.rs 🔗

@@ -516,12 +516,7 @@ pub fn main() {
         );
         supermaven::init(app_state.client.clone(), cx);
         language_model::init(app_state.client.clone(), cx);
-        language_models::init(
-            app_state.user_store.clone(),
-            app_state.client.clone(),
-            app_state.fs.clone(),
-            cx,
-        );
+        language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
         web_search::init(cx);
         web_search_providers::init(app_state.client.clone(), cx);
         snippet_provider::init(cx);

crates/zed/src/zed.rs 🔗

@@ -4441,12 +4441,7 @@ mod tests {
             );
             image_viewer::init(cx);
             language_model::init(app_state.client.clone(), cx);
-            language_models::init(
-                app_state.user_store.clone(),
-                app_state.client.clone(),
-                app_state.fs.clone(),
-                cx,
-            );
+            language_models::init(app_state.user_store.clone(), app_state.client.clone(), cx);
             web_search::init(cx);
             web_search_providers::init(app_state.client.clone(), cx);
             let prompt_builder = PromptBuilder::load(app_state.fs.clone(), false, cx);