assistant_settings.rs

mod agent_profile;

use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use anyhow::{Result, bail};
use collections::IndexMap;
use deepseek::Model as DeepseekModel;
use gpui::{App, Pixels, SharedString};
use language_model::{CloudModel, LanguageModel};
use lmstudio::Model as LmStudioModel;
use ollama::Model as OllamaModel;
use schemars::{JsonSchema, schema::Schema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

pub use crate::agent_profile::*;

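/// Registers `AssistantSettings` with the global settings store so later code
/// can read it via `AssistantSettings::get_global(cx)`. Intended to be called
/// once during application startup.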
pub fn init(cx: &mut App) {
    AssistantSettings::register(cx);
}

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    #[default]
    PrimaryScreen,
    AllScreens,
    Never,
}

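/// Per-provider configuration as it appeared in version "1" of the assistant
/// settings. It is kept so older settings files can still be deserialized and
/// upgraded; the current format selects models with [`LanguageModelSelection`]
/// instead.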
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}

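/// The resolved, in-memory assistant settings produced by merging every
/// settings source. The on-disk representation is [`AssistantSettingsContent`].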
#[derive(Default, Clone, Debug)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub inline_assistant_model: Option<LanguageModelSelection>,
    pub commit_message_model: Option<LanguageModelSelection>,
    pub thread_summary_model: Option<LanguageModelSelection>,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub default_profile: AgentProfileId,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    pub stream_edits: bool,
    pub single_file_review: bool,
    pub model_parameters: Vec<LanguageModelParameters>,
    pub preferred_completion_mode: CompletionMode,
}

impl AssistantSettings {
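    /// Returns the temperature override that applies to `model`, if any.
    ///
    /// `model_parameters` is scanned from the end, so the last matching entry
    /// in the user's settings wins. A minimal usage sketch (illustrative only;
    /// assumes a `model: Arc<dyn LanguageModel>` is already in scope):
    ///
    /// ```ignore
    /// let temperature = AssistantSettings::temperature_for_model(&model, cx)
    ///     .unwrap_or(1.0); // hypothetical fallback chosen by the caller
    /// ```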
    pub fn temperature_for_model(model: &Arc<dyn LanguageModel>, cx: &App) -> Option<f32> {
        let settings = Self::get_global(cx);
        settings
            .model_parameters
            .iter()
            .rfind(|setting| setting.matches(model))
            .and_then(|m| m.temperature)
    }

    pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
        self.inline_assistant_model = Some(LanguageModelSelection {
            provider: provider.into(),
            model,
        });
    }

    pub fn set_commit_message_model(&mut self, provider: String, model: String) {
        self.commit_message_model = Some(LanguageModelSelection {
            provider: provider.into(),
            model,
        });
    }

    pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
        self.thread_summary_model = Some(LanguageModelSelection {
            provider: provider.into(),
            model,
        });
    }
}

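/// A single entry of the `model_parameters` setting. `provider` and `model`
/// are both optional filters; an entry that sets neither applies to every
/// model.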
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelParameters {
    pub provider: Option<LanguageModelProviderSetting>,
    pub model: Option<SharedString>,
    pub temperature: Option<f32>,
}

impl LanguageModelParameters {
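    /// Whether this entry applies to `model`: each filter must match when
    /// present, and a missing filter matches anything.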
    pub fn matches(&self, model: &Arc<dyn LanguageModel>) -> bool {
        if let Some(provider) = &self.provider {
            if provider.0 != model.provider_id().0 {
                return false;
            }
        }
        if let Some(setting_model) = &self.model {
            if *setting_model != model.id().0 {
                return false;
            }
        }
        true
    }
}

/// Assistant panel settings
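///
/// This is the on-disk shape of the settings: it may hold the current
/// versioned format or the legacy flat format, and it is upgraded to
/// [`AssistantSettingsContentV2`] before its values are used.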
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}

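/// Either the versioned settings payload or the original, unversioned legacy
/// format. `#[serde(untagged)]` makes serde try each variant in order when
/// deserializing.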
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    Versioned(Box<VersionedAssistantSettingsContent>),
    Legacy(LegacyAssistantSettingsContent),
}

impl AssistantSettingsContentInner {
    fn for_v2(content: AssistantSettingsContentV2) -> Self {
        AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
            content,
        )))
    }
}

impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl AssistantSettingsContent {
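    /// Reports whether the settings on disk still use the legacy or
    /// `"version": "1"` format and should be migrated.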
    pub fn is_version_outdated(&self) -> bool {
        match &self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            Some(AssistantSettingsContentInner::Legacy(_)) => true,
            None => false,
        }
    }

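    /// Converts whatever shape was deserialized into the current V2 content,
    /// mapping V1 per-provider defaults and the legacy OpenAI-only fields onto
    /// [`LanguageModelSelection`] values. Fields with no V1 or legacy
    /// equivalent are left unset.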
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match &self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::LmStudio { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "lmstudio".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::DeepSeek { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "deepseek".into(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                    inline_assistant_model: None,
                    commit_message_model: None,
                    thread_summary_model: None,
                    inline_alternatives: None,
                    default_profile: None,
                    profiles: None,
                    always_allow_tool_actions: None,
                    notify_when_agent_waiting: None,
                    stream_edits: None,
                    single_file_review: None,
                    model_parameters: Vec::new(),
                    preferred_completion_mode: None,
                },
                VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".into(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
                inline_assistant_model: None,
                commit_message_model: None,
                thread_summary_model: None,
                inline_alternatives: None,
                default_profile: None,
                profiles: None,
                always_allow_tool_actions: None,
                notify_when_agent_waiting: None,
                stream_edits: None,
                single_file_review: None,
                model_parameters: Vec::new(),
                preferred_completion_mode: None,
            },
            None => AssistantSettingsContentV2::default(),
        }
    }

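    /// Sets the dock position in whichever settings format is present,
    /// creating a fresh V2 document when there are no assistant settings yet.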
    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.dock = Some(dock);
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                settings.dock = Some(dock);
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        dock: Some(dock),
                        ..Default::default()
                    },
                ))
            }
        }
    }

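    /// Records `language_model` as the default model in whichever settings
    /// format is present: V2 stores a [`LanguageModelSelection`], V1 stores
    /// the matching per-provider default, and the legacy format only accepts
    /// OpenAI models.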
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    match provider.as_ref() {
                        "zed.dev" => {
                            log::warn!("attempted to set zed.dev model on outdated settings");
                        }
                        "anthropic" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Anthropic {
                                default_model: AnthropicModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        "ollama" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Ollama {
                                default_model: Some(ollama::Model::new(
                                    &model,
                                    None,
                                    None,
                                    Some(language_model.supports_tools()),
                                )),
                                api_url,
                            });
                        }
                        "lmstudio" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::LmStudio {
                                default_model: Some(lmstudio::Model::new(&model, None, None)),
                                api_url,
                            });
                        }
                        "openai" => {
                            let (api_url, available_models) = match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    available_models,
                                    ..
                                }) => (api_url.clone(), available_models.clone()),
                                _ => (None, None),
                            };
                            settings.provider = Some(AssistantProviderContentV1::OpenAi {
                                default_model: OpenAiModel::from_id(&model).ok(),
                                api_url,
                                available_models,
                            });
                        }
                        "deepseek" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                                default_model: DeepseekModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        _ => {}
                    }
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.default_model = Some(LanguageModelSelection {
                        provider: provider.into(),
                        model,
                    });
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        default_model: Some(LanguageModelSelection {
                            provider: provider.into(),
                            model,
                        }),
                        ..Default::default()
                    },
                ));
            }
        }
    }

    pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.inline_assistant_model = Some(LanguageModelSelection {
                provider: provider.into(),
                model,
            });
            Ok(())
        })
        .ok();
    }

    pub fn set_commit_message_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.commit_message_model = Some(LanguageModelSelection {
                provider: provider.into(),
                model,
            });
            Ok(())
        })
        .ok();
    }

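    /// Applies `f` to the V2 settings content, creating an empty V2 document
    /// if none exists yet. V1 and legacy documents are left untouched and the
    /// call still returns `Ok`, so callers that need the change to persist
    /// should migrate the settings first.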
    pub fn v2_setting(
        &mut self,
        f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
    ) -> anyhow::Result<()> {
        match self.inner.get_or_insert_with(|| {
            AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
                ..Default::default()
            })
        }) {
            AssistantSettingsContentInner::Versioned(boxed) => {
                if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
                    f(settings)
                } else {
                    Ok(())
                }
            }
            _ => Ok(()),
        }
    }

    pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.thread_summary_model = Some(LanguageModelSelection {
                provider: provider.into(),
                model,
            });
            Ok(())
        })
        .ok();
    }

    pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
        self.v2_setting(|setting| {
            setting.always_allow_tool_actions = Some(allow);
            Ok(())
        })
        .ok();
    }

    pub fn set_single_file_review(&mut self, allow: bool) {
        self.v2_setting(|setting| {
            setting.single_file_review = Some(allow);
            Ok(())
        })
        .ok();
    }

    pub fn set_profile(&mut self, profile_id: AgentProfileId) {
        self.v2_setting(|setting| {
            setting.default_profile = Some(profile_id);
            Ok(())
        })
        .ok();
    }

    pub fn create_profile(
        &mut self,
        profile_id: AgentProfileId,
        profile: AgentProfile,
    ) -> Result<()> {
        self.v2_setting(|settings| {
            let profiles = settings.profiles.get_or_insert_default();
            if profiles.contains_key(&profile_id) {
                bail!("profile with ID '{profile_id}' already exists");
            }

            profiles.insert(
                profile_id,
                AgentProfileContent {
                    name: profile.name.into(),
                    tools: profile.tools,
                    enable_all_context_servers: Some(profile.enable_all_context_servers),
                    context_servers: profile
                        .context_servers
                        .into_iter()
                        .map(|(server_id, preset)| {
                            (
                                server_id,
                                ContextServerPresetContent {
                                    tools: preset.tools,
                                },
                            )
                        })
                        .collect(),
                },
            );

            Ok(())
        })
    }
}

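/// The versioned settings payload. The `version` field in the user's JSON
/// (for example `"version": "2"`) selects which schema the rest of the object
/// is parsed against.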
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            inline_assistant_model: None,
            commit_message_model: None,
            thread_summary_model: None,
            inline_alternatives: None,
            default_profile: None,
            profiles: None,
            always_allow_tool_actions: None,
            notify_when_agent_waiting: None,
            stream_edits: None,
            single_file_review: None,
            model_parameters: Vec::new(),
            preferred_completion_mode: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
    /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
    ///
    /// Default: true
    single_file_review: Option<bool>,
    /// Additional parameters for language model requests. When making a request
    /// to a model, parameters will be taken from the last entry in this list
    /// that matches the model's provider and name. In each entry, both provider
    /// and model are optional, so that you can specify parameters for either
    /// one.
    ///
    /// Default: []
    #[serde(default)]
    model_parameters: Vec<LanguageModelParameters>,

    /// What completion mode to enable for new threads
    ///
    /// Default: normal
    preferred_completion_mode: Option<CompletionMode>,
}

#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
    #[default]
    Normal,
    Max,
}

impl From<CompletionMode> for zed_llm_client::CompletionMode {
    fn from(value: CompletionMode) -> Self {
        match value {
            CompletionMode::Normal => zed_llm_client::CompletionMode::Normal,
            CompletionMode::Max => zed_llm_client::CompletionMode::Max,
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    pub provider: LanguageModelProviderSetting,
    pub model: String,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelProviderSetting(pub String);

impl JsonSchema for LanguageModelProviderSetting {
    fn schema_name() -> String {
        "LanguageModelProviderSetting".into()
    }

    fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        schemars::schema::SchemaObject {
            enum_values: Some(vec![
                "anthropic".into(),
                "bedrock".into(),
                "google".into(),
                "lmstudio".into(),
                "ollama".into(),
                "openai".into(),
                "zed.dev".into(),
                "copilot_chat".into(),
                "deepseek".into(),
            ]),
            ..Default::default()
        }
        .into()
    }
}

impl From<String> for LanguageModelProviderSetting {
    fn from(provider: String) -> Self {
        Self(provider)
    }
}

impl From<&str> for LanguageModelProviderSetting {
    fn from(provider: &str) -> Self {
        Self(provider.to_string())
    }
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: LanguageModelProviderSetting("openai".to_string()),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}

#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", or "zed.dev",
    /// each with its respective default model and configuration.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}

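// Values are read from the "agent" key, falling back to the legacy "assistant"
// key, and the "version" field is preserved when the settings file is
// rewritten.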
impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("agent");

    const FALLBACK_KEY: Option<&'static str> = Some("assistant");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
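            // The per-feature model overrides are plain `Option`s: a later
            // source only replaces them when it actually sets a value,
            // otherwise the value from an earlier source is carried forward.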
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.single_file_review, value.single_file_review);
            merge(&mut settings.default_profile, value.default_profile);
            merge(
                &mut settings.preferred_completion_mode,
                value.preferred_completion_mode,
            );

            settings
                .model_parameters
                .extend_from_slice(&value.model_parameters);

            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}

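/// Overwrites `target` only when the layered settings actually provide a
/// value, so later sources (such as user settings) win without resetting
/// fields they leave unspecified.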
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};
    use settings::SettingsStore;

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                default_profile: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                                single_file_review: None,
                                model_parameters: Vec::new(),
                                preferred_completion_mode: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            agent: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.agent.is_version_outdated());
    }

    #[gpui::test]
    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let mut test_settings = settings::SettingsStore::test(cx);
            let user_settings_content = r#"{
            "assistant": {
                "enabled": true,
                "version": "2",
                "default_model": {
                  "provider": "zed.dev",
                  "model": "gpt-99"
                },
            }}"#;
            test_settings
                .set_user_settings(user_settings_content, cx)
                .unwrap();
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.run_until_parked();

        let assistant_settings = cx.update(|cx| AssistantSettings::get_global(cx).clone());
        assert!(assistant_settings.enabled);
        assert!(!assistant_settings.using_outdated_settings_version);
        assert_eq!(assistant_settings.default_model.model, "gpt-99");

        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
            settings_store.update_user_settings::<AssistantSettings>(cx, |settings| {
                *settings = AssistantSettingsContent {
                    inner: Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(false),
                            default_model: Some(LanguageModelSelection {
                                provider: "xai".to_owned().into(),
                                model: "grok".to_owned(),
                            }),
                            ..Default::default()
                        },
                    )),
                };
            });
        });

        cx.run_until_parked();

        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
            agent: Option<serde_json_lenient::Value>,
        }

        let assistant_settings: AssistantSettingsTest = serde_json::from_value(settings).unwrap();
        assert!(assistant_settings.agent.is_none());
    }
}