1mod agent_profile;
2
3use std::sync::Arc;
4
5use ::open_ai::Model as OpenAiModel;
6use anthropic::Model as AnthropicModel;
7use anyhow::{Result, bail};
8use collections::IndexMap;
9use deepseek::Model as DeepseekModel;
10use gpui::{App, Pixels, SharedString};
11use language_model::LanguageModel;
12use lmstudio::Model as LmStudioModel;
13use mistral::Model as MistralModel;
14use ollama::Model as OllamaModel;
15use schemars::{JsonSchema, schema::Schema};
16use serde::{Deserialize, Serialize};
17use settings::{Settings, SettingsSources};
18
19pub use crate::agent_profile::*;
20
/// Registers `AssistantSettings` with the global settings store so it is
/// loaded and kept in sync with the settings file.
pub fn init(cx: &mut App) {
    AssistantSettings::register(cx);
}
24
/// Where the agent panel is docked in the workspace.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}
33
/// Which view type the agent panel shows by default.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum DefaultView {
    /// The thread (chat) view.
    #[default]
    Thread,
    /// The text-thread view.
    TextThread,
}
41
/// Where to show a popup notification when the agent is waiting for user
/// input.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    /// Notify on the primary screen only.
    #[default]
    PrimaryScreen,
    /// Notify on every connected screen.
    AllScreens,
    /// Never show the notification.
    Never,
}
50
/// Provider configuration as stored by the V1 settings schema.
///
/// Retained only so that existing V1 settings files can be upgraded to the
/// current schema (see `AssistantSettingsContent::upgrade`); each variant
/// carries the provider's default model and, where applicable, its API URL.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
#[schemars(deny_unknown_fields)]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<String> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "mistral")]
    Mistral {
        default_model: Option<MistralModel>,
        api_url: Option<String>,
    },
}
89
/// Resolved agent/assistant settings, produced by merging every settings
/// source (defaults plus user customizations) in `Settings::load`.
#[derive(Default, Clone, Debug)]
pub struct AssistantSettings {
    /// Whether the assistant is enabled.
    pub enabled: bool,
    /// Whether to show the agent panel button in the status bar.
    pub button: bool,
    /// Where the agent panel is docked.
    pub dock: AssistantDockPosition,
    /// Default width in pixels when docked to the left or right.
    pub default_width: Pixels,
    /// Default height in pixels when docked to the bottom.
    pub default_height: Pixels,
    /// Model used when no feature-specific model is configured.
    pub default_model: LanguageModelSelection,
    /// Model for inline assists; falls back to `default_model` when `None`.
    pub inline_assistant_model: Option<LanguageModelSelection>,
    /// Model for git commit messages; falls back to `default_model` when `None`.
    pub commit_message_model: Option<LanguageModelSelection>,
    /// Model for thread summaries; falls back to `default_model` when `None`.
    pub thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models used to generate inline-assist alternatives.
    pub inline_alternatives: Vec<LanguageModelSelection>,
    /// True when any settings source still uses the V1 or legacy schema.
    pub using_outdated_settings_version: bool,
    /// Profile activated by default in the agent panel.
    pub default_profile: AgentProfileId,
    /// View shown by default in the agent panel.
    pub default_view: DefaultView,
    /// All configured agent profiles, keyed by profile ID.
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    /// Skip the confirmation prompt before running tool actions.
    pub always_allow_tool_actions: bool,
    /// Where to notify when the agent is waiting for user input.
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    /// Whether to stream edits from the agent as they are received.
    pub stream_edits: bool,
    /// Whether to show agent edits in single-file editors as well.
    pub single_file_review: bool,
    /// Per-provider/model parameter overrides; the last matching entry wins.
    pub model_parameters: Vec<LanguageModelParameters>,
    /// Completion mode to enable for new threads.
    pub preferred_completion_mode: CompletionMode,
    /// Whether to show thumbs up/down feedback buttons in the agent panel.
    pub enable_feedback: bool,
}
114
115impl AssistantSettings {
116 pub fn temperature_for_model(model: &Arc<dyn LanguageModel>, cx: &App) -> Option<f32> {
117 let settings = Self::get_global(cx);
118 settings
119 .model_parameters
120 .iter()
121 .rfind(|setting| setting.matches(model))
122 .and_then(|m| m.temperature)
123 }
124
125 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
126 self.inline_assistant_model = Some(LanguageModelSelection {
127 provider: provider.into(),
128 model,
129 });
130 }
131
132 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
133 self.commit_message_model = Some(LanguageModelSelection {
134 provider: provider.into(),
135 model,
136 });
137 }
138
139 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
140 self.thread_summary_model = Some(LanguageModelSelection {
141 provider: provider.into(),
142 model,
143 });
144 }
145}
146
/// A parameter override (currently just temperature) scoped to a provider
/// and/or model; unset fields act as wildcards (see `matches`).
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelParameters {
    /// Provider this entry applies to; `None` matches any provider.
    pub provider: Option<LanguageModelProviderSetting>,
    /// Model ID this entry applies to; `None` matches any model.
    pub model: Option<SharedString>,
    /// Sampling temperature to use for matching models.
    pub temperature: Option<f32>,
}
153
154impl LanguageModelParameters {
155 pub fn matches(&self, model: &Arc<dyn LanguageModel>) -> bool {
156 if let Some(provider) = &self.provider {
157 if provider.0 != model.provider_id().0 {
158 return false;
159 }
160 }
161 if let Some(setting_model) = &self.model {
162 if *setting_model != model.id().0 {
163 return false;
164 }
165 }
166 true
167 }
168}
169
/// Assistant panel settings
///
/// The on-disk (settings file) representation of [`AssistantSettings`]. The
/// payload is flattened, so the settings file contains the versioned fields
/// directly rather than nested under an `inner` key.
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    /// `None` when the user has no agent settings at all.
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}
176
/// The two accepted shapes for settings-file content: an explicitly
/// versioned document, or the legacy (pre-versioning) format.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    /// Content carrying a `"version"` field (`"1"` or `"2"`).
    Versioned(Box<VersionedAssistantSettingsContent>),
    /// Pre-versioned content from when only OpenAI was supported.
    Legacy(LegacyAssistantSettingsContent),
}
183
184impl AssistantSettingsContentInner {
185 fn for_v2(content: AssistantSettingsContentV2) -> Self {
186 AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
187 content,
188 )))
189 }
190}
191
// `AssistantSettingsContent` is a transparent wrapper (its `inner` field is
// flattened), so its JSON schema is exactly that of the versioned content.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
205
206impl AssistantSettingsContent {
207 pub fn is_version_outdated(&self) -> bool {
208 match &self.inner {
209 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
210 VersionedAssistantSettingsContent::V1(_) => true,
211 VersionedAssistantSettingsContent::V2(_) => false,
212 },
213 Some(AssistantSettingsContentInner::Legacy(_)) => true,
214 None => false,
215 }
216 }
217
218 fn upgrade(&self) -> AssistantSettingsContentV2 {
219 match &self.inner {
220 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
221 VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
222 enabled: settings.enabled,
223 button: settings.button,
224 dock: settings.dock,
225 default_width: settings.default_width,
226 default_height: settings.default_width,
227 default_model: settings
228 .provider
229 .clone()
230 .and_then(|provider| match provider {
231 AssistantProviderContentV1::ZedDotDev { default_model } => {
232 default_model.map(|model| LanguageModelSelection {
233 provider: "zed.dev".into(),
234 model,
235 })
236 }
237 AssistantProviderContentV1::OpenAi { default_model, .. } => {
238 default_model.map(|model| LanguageModelSelection {
239 provider: "openai".into(),
240 model: model.id().to_string(),
241 })
242 }
243 AssistantProviderContentV1::Anthropic { default_model, .. } => {
244 default_model.map(|model| LanguageModelSelection {
245 provider: "anthropic".into(),
246 model: model.id().to_string(),
247 })
248 }
249 AssistantProviderContentV1::Ollama { default_model, .. } => {
250 default_model.map(|model| LanguageModelSelection {
251 provider: "ollama".into(),
252 model: model.id().to_string(),
253 })
254 }
255 AssistantProviderContentV1::LmStudio { default_model, .. } => {
256 default_model.map(|model| LanguageModelSelection {
257 provider: "lmstudio".into(),
258 model: model.id().to_string(),
259 })
260 }
261 AssistantProviderContentV1::DeepSeek { default_model, .. } => {
262 default_model.map(|model| LanguageModelSelection {
263 provider: "deepseek".into(),
264 model: model.id().to_string(),
265 })
266 }
267 AssistantProviderContentV1::Mistral { default_model, .. } => {
268 default_model.map(|model| LanguageModelSelection {
269 provider: "mistral".into(),
270 model: model.id().to_string(),
271 })
272 }
273 }),
274 inline_assistant_model: None,
275 commit_message_model: None,
276 thread_summary_model: None,
277 inline_alternatives: None,
278 default_profile: None,
279 default_view: None,
280 profiles: None,
281 always_allow_tool_actions: None,
282 notify_when_agent_waiting: None,
283 stream_edits: None,
284 single_file_review: None,
285 model_parameters: Vec::new(),
286 preferred_completion_mode: None,
287 enable_feedback: None,
288 },
289 VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
290 },
291 Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
292 enabled: None,
293 button: settings.button,
294 dock: settings.dock,
295 default_width: settings.default_width,
296 default_height: settings.default_height,
297 default_model: Some(LanguageModelSelection {
298 provider: "openai".into(),
299 model: settings
300 .default_open_ai_model
301 .clone()
302 .unwrap_or_default()
303 .id()
304 .to_string(),
305 }),
306 inline_assistant_model: None,
307 commit_message_model: None,
308 thread_summary_model: None,
309 inline_alternatives: None,
310 default_profile: None,
311 default_view: None,
312 profiles: None,
313 always_allow_tool_actions: None,
314 notify_when_agent_waiting: None,
315 stream_edits: None,
316 single_file_review: None,
317 model_parameters: Vec::new(),
318 preferred_completion_mode: None,
319 enable_feedback: None,
320 },
321 None => AssistantSettingsContentV2::default(),
322 }
323 }
324
325 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
326 match &mut self.inner {
327 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
328 VersionedAssistantSettingsContent::V1(ref mut settings) => {
329 settings.dock = Some(dock);
330 }
331 VersionedAssistantSettingsContent::V2(ref mut settings) => {
332 settings.dock = Some(dock);
333 }
334 },
335 Some(AssistantSettingsContentInner::Legacy(settings)) => {
336 settings.dock = Some(dock);
337 }
338 None => {
339 self.inner = Some(AssistantSettingsContentInner::for_v2(
340 AssistantSettingsContentV2 {
341 dock: Some(dock),
342 ..Default::default()
343 },
344 ))
345 }
346 }
347 }
348
349 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
350 let model = language_model.id().0.to_string();
351 let provider = language_model.provider_id().0.to_string();
352
353 match &mut self.inner {
354 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
355 VersionedAssistantSettingsContent::V1(ref mut settings) => {
356 match provider.as_ref() {
357 "zed.dev" => {
358 log::warn!("attempted to set zed.dev model on outdated settings");
359 }
360 "anthropic" => {
361 let api_url = match &settings.provider {
362 Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
363 api_url.clone()
364 }
365 _ => None,
366 };
367 settings.provider = Some(AssistantProviderContentV1::Anthropic {
368 default_model: AnthropicModel::from_id(&model).ok(),
369 api_url,
370 });
371 }
372 "ollama" => {
373 let api_url = match &settings.provider {
374 Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
375 api_url.clone()
376 }
377 _ => None,
378 };
379 settings.provider = Some(AssistantProviderContentV1::Ollama {
380 default_model: Some(ollama::Model::new(
381 &model,
382 None,
383 None,
384 Some(language_model.supports_tools()),
385 )),
386 api_url,
387 });
388 }
389 "lmstudio" => {
390 let api_url = match &settings.provider {
391 Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
392 api_url.clone()
393 }
394 _ => None,
395 };
396 settings.provider = Some(AssistantProviderContentV1::LmStudio {
397 default_model: Some(lmstudio::Model::new(
398 &model, None, None, false,
399 )),
400 api_url,
401 });
402 }
403 "openai" => {
404 let (api_url, available_models) = match &settings.provider {
405 Some(AssistantProviderContentV1::OpenAi {
406 api_url,
407 available_models,
408 ..
409 }) => (api_url.clone(), available_models.clone()),
410 _ => (None, None),
411 };
412 settings.provider = Some(AssistantProviderContentV1::OpenAi {
413 default_model: OpenAiModel::from_id(&model).ok(),
414 api_url,
415 available_models,
416 });
417 }
418 "deepseek" => {
419 let api_url = match &settings.provider {
420 Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
421 api_url.clone()
422 }
423 _ => None,
424 };
425 settings.provider = Some(AssistantProviderContentV1::DeepSeek {
426 default_model: DeepseekModel::from_id(&model).ok(),
427 api_url,
428 });
429 }
430 _ => {}
431 }
432 }
433 VersionedAssistantSettingsContent::V2(ref mut settings) => {
434 settings.default_model = Some(LanguageModelSelection {
435 provider: provider.into(),
436 model,
437 });
438 }
439 },
440 Some(AssistantSettingsContentInner::Legacy(settings)) => {
441 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
442 settings.default_open_ai_model = Some(model);
443 }
444 }
445 None => {
446 self.inner = Some(AssistantSettingsContentInner::for_v2(
447 AssistantSettingsContentV2 {
448 default_model: Some(LanguageModelSelection {
449 provider: provider.into(),
450 model,
451 }),
452 ..Default::default()
453 },
454 ));
455 }
456 }
457 }
458
459 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
460 self.v2_setting(|setting| {
461 setting.inline_assistant_model = Some(LanguageModelSelection {
462 provider: provider.into(),
463 model,
464 });
465 Ok(())
466 })
467 .ok();
468 }
469
470 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
471 self.v2_setting(|setting| {
472 setting.commit_message_model = Some(LanguageModelSelection {
473 provider: provider.into(),
474 model,
475 });
476 Ok(())
477 })
478 .ok();
479 }
480
481 pub fn v2_setting(
482 &mut self,
483 f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
484 ) -> anyhow::Result<()> {
485 match self.inner.get_or_insert_with(|| {
486 AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
487 ..Default::default()
488 })
489 }) {
490 AssistantSettingsContentInner::Versioned(boxed) => {
491 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
492 f(settings)
493 } else {
494 Ok(())
495 }
496 }
497 _ => Ok(()),
498 }
499 }
500
501 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
502 self.v2_setting(|setting| {
503 setting.thread_summary_model = Some(LanguageModelSelection {
504 provider: provider.into(),
505 model,
506 });
507 Ok(())
508 })
509 .ok();
510 }
511
512 pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
513 self.v2_setting(|setting| {
514 setting.always_allow_tool_actions = Some(allow);
515 Ok(())
516 })
517 .ok();
518 }
519
520 pub fn set_single_file_review(&mut self, allow: bool) {
521 self.v2_setting(|setting| {
522 setting.single_file_review = Some(allow);
523 Ok(())
524 })
525 .ok();
526 }
527
528 pub fn set_profile(&mut self, profile_id: AgentProfileId) {
529 self.v2_setting(|setting| {
530 setting.default_profile = Some(profile_id);
531 Ok(())
532 })
533 .ok();
534 }
535
536 pub fn create_profile(
537 &mut self,
538 profile_id: AgentProfileId,
539 profile: AgentProfile,
540 ) -> Result<()> {
541 self.v2_setting(|settings| {
542 let profiles = settings.profiles.get_or_insert_default();
543 if profiles.contains_key(&profile_id) {
544 bail!("profile with ID '{profile_id}' already exists");
545 }
546
547 profiles.insert(
548 profile_id,
549 AgentProfileContent {
550 name: profile.name.into(),
551 tools: profile.tools,
552 enable_all_context_servers: Some(profile.enable_all_context_servers),
553 context_servers: profile
554 .context_servers
555 .into_iter()
556 .map(|(server_id, preset)| {
557 (
558 server_id,
559 ContextServerPresetContent {
560 tools: preset.tools,
561 },
562 )
563 })
564 .collect(),
565 },
566 );
567
568 Ok(())
569 })
570 }
571}
572
/// Settings-file content tagged with an explicit `"version"` field.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
#[schemars(deny_unknown_fields)]
pub enum VersionedAssistantSettingsContent {
    /// Schema version "1"; upgraded on load.
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    /// Schema version "2"; the current schema.
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
582
583impl Default for VersionedAssistantSettingsContent {
584 fn default() -> Self {
585 Self::V2(AssistantSettingsContentV2 {
586 enabled: None,
587 button: None,
588 dock: None,
589 default_width: None,
590 default_height: None,
591 default_model: None,
592 inline_assistant_model: None,
593 commit_message_model: None,
594 thread_summary_model: None,
595 inline_alternatives: None,
596 default_profile: None,
597 default_view: None,
598 profiles: None,
599 always_allow_tool_actions: None,
600 notify_when_agent_waiting: None,
601 stream_edits: None,
602 single_file_review: None,
603 model_parameters: Vec::new(),
604 preferred_completion_mode: None,
605 enable_feedback: None,
606 })
607 }
608}
609
/// Settings-file content for schema version "2" — the current schema.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the agent panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the agent panel.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the agent panel is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the agent panel is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// Which view type to show by default in the agent panel.
    ///
    /// Default: "thread"
    default_view: Option<DefaultView>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
    /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
    ///
    /// Default: true
    single_file_review: Option<bool>,
    /// Additional parameters for language model requests. When making a request
    /// to a model, parameters will be taken from the last entry in this list
    /// that matches the model's provider and name. In each entry, both provider
    /// and model are optional, so that you can specify parameters for either
    /// one.
    ///
    /// Default: []
    #[serde(default)]
    model_parameters: Vec<LanguageModelParameters>,
    /// What completion mode to enable for new threads
    ///
    /// Default: normal
    preferred_completion_mode: Option<CompletionMode>,
    /// Whether to show thumb buttons for feedback in the agent panel.
    ///
    /// Default: true
    enable_feedback: Option<bool>,
}
688
/// Completion mode applied to new threads; maps onto
/// `zed_llm_client::CompletionMode`.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
    #[default]
    Normal,
    Max,
}
696
697impl From<CompletionMode> for zed_llm_client::CompletionMode {
698 fn from(value: CompletionMode) -> Self {
699 match value {
700 CompletionMode::Normal => zed_llm_client::CompletionMode::Normal,
701 CompletionMode::Max => zed_llm_client::CompletionMode::Max,
702 }
703 }
704}
705
/// A (provider, model) pair identifying a specific language model.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    /// Provider identifier, e.g. "openai" or "zed.dev".
    pub provider: LanguageModelProviderSetting,
    /// Model ID within that provider.
    pub model: String,
}
711
/// Newtype over a provider identifier string (e.g. "openai", "anthropic");
/// its JSON schema enumerates the known provider names.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelProviderSetting(pub String);
714
715impl JsonSchema for LanguageModelProviderSetting {
716 fn schema_name() -> String {
717 "LanguageModelProviderSetting".into()
718 }
719
720 fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
721 schemars::schema::SchemaObject {
722 enum_values: Some(vec![
723 "anthropic".into(),
724 "amazon-bedrock".into(),
725 "google".into(),
726 "lmstudio".into(),
727 "ollama".into(),
728 "openai".into(),
729 "zed.dev".into(),
730 "copilot_chat".into(),
731 "deepseek".into(),
732 "mistral".into(),
733 ]),
734 ..Default::default()
735 }
736 .into()
737 }
738}
739
// Allows `String` provider names to convert directly into the newtype.
impl From<String> for LanguageModelProviderSetting {
    fn from(provider: String) -> Self {
        Self(provider)
    }
}
745
// Convenience for string literals, e.g. `"openai".into()`.
impl From<&str> for LanguageModelProviderSetting {
    fn from(provider: &str) -> Self {
        Self(provider.to_string())
    }
}
751
752impl Default for LanguageModelSelection {
753 fn default() -> Self {
754 Self {
755 provider: LanguageModelProviderSetting("openai".to_string()),
756 model: "gpt-4".to_string(),
757 }
758 }
759}
760
/// Serialized (settings-file) form of an agent profile; converted to/from
/// `AgentProfile` when settings are loaded or written.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    /// Display name of the profile.
    pub name: Arc<str>,
    /// Per-tool enabled flags.
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    /// Per-context-server tool presets, keyed by server ID.
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
771
/// Serialized per-context-server preset: which of its tools are enabled.
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    /// Per-tool enabled flags for this context server.
    pub tools: IndexMap<Arc<str>, bool>,
}
776
/// Settings-file content for schema version "1". Superseded by
/// [`AssistantSettingsContentV2`]; kept so old settings files can be
/// upgraded on load.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
806
/// Pre-versioned settings-file content, from when the assistant only
/// supported OpenAI. Kept so old settings files can be upgraded on load.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}
835
impl Settings for AssistantSettings {
    // Settings are read from the "agent" key, falling back to the older
    // "assistant" key for backwards compatibility.
    const KEY: Option<&'static str> = Some("agent");

    const FALLBACK_KEY: Option<&'static str> = Some("assistant");

    // Keep the "version" field intact when the settings file is rewritten.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    // Builds the effective settings by layering defaults and user
    // customizations in order; later sources override earlier ones.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Surface a migration hint if any source still uses V1/legacy.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            // Normalize each source to the V2 schema before merging.
            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            // For the optional model overrides: a later source wins, but an
            // earlier source's value is kept when the later source is unset.
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.single_file_review, value.single_file_review);
            merge(&mut settings.default_profile, value.default_profile);
            merge(&mut settings.default_view, value.default_view);
            merge(
                &mut settings.preferred_completion_mode,
                value.preferred_completion_mode,
            );
            merge(&mut settings.enable_feedback, value.enable_feedback);

            // Parameter overrides accumulate across sources; entries from
            // later sources come later and so take precedence (last match
            // wins, see `temperature_for_model`).
            settings
                .model_parameters
                .extend_from_slice(&value.model_parameters);

            // Profiles merge by ID: a later source replaces same-ID profiles
            // from earlier sources but leaves other profiles intact.
            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    // Maps VS Code's "chat.agent.enabled" onto our enabled/button flags
    // (legacy settings have no `enabled` field, so only `button` is set).
    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}
967
/// Overwrites `target` with `value` when one is present; a `None` leaves the
/// existing value untouched. Used to layer settings sources in `load`.
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(replacement) = value {
        *target = replacement;
    }
}
973
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};
    use settings::SettingsStore;

    use super::*;

    // Verifies the built-in defaults, then writes V2 settings through the
    // settings store and checks the persisted JSON carries `"version": "2"`
    // and round-trips as up-to-date.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // Defaults: up-to-date schema and the zed.dev Claude model.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        // Persist an explicit V2 document to the settings file.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                default_profile: None,
                                default_view: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                                single_file_review: None,
                                enable_feedback: None,
                                model_parameters: Vec::new(),
                                preferred_completion_mode: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        // The written file must carry the version tag...
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            agent: AssistantSettingsContent,
        }

        // ...and deserialize back as a current-schema document.
        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.agent.is_version_outdated());
    }

    // Verifies that settings under the old "assistant" key are still read
    // (FALLBACK_KEY), and that programmatic updates do not introduce a new
    // "agent" key alongside it.
    #[gpui::test]
    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let mut test_settings = settings::SettingsStore::test(cx);
            let user_settings_content = r#"{
            "assistant": {
                "enabled": true,
                "version": "2",
                "default_model": {
                    "provider": "zed.dev",
                    "model": "gpt-99"
                },
            }}"#;
            test_settings
                .set_user_settings(user_settings_content, cx)
                .unwrap();
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.run_until_parked();

        // Values under "assistant" are picked up via the fallback key.
        let assistant_settings = cx.update(|cx| AssistantSettings::get_global(cx).clone());
        assert!(assistant_settings.enabled);
        assert!(!assistant_settings.using_outdated_settings_version);
        assert_eq!(assistant_settings.default_model.model, "gpt-99");

        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
            settings_store.update_user_settings::<AssistantSettings>(cx, |settings| {
                *settings = AssistantSettingsContent {
                    inner: Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(false),
                            default_model: Some(LanguageModelSelection {
                                provider: "xai".to_owned().into(),
                                model: "grok".to_owned(),
                            }),
                            ..Default::default()
                        },
                    )),
                };
            });
        });

        cx.run_until_parked();

        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
            agent: Option<serde_json_lenient::Value>,
        }

        // The update must have been written back under "assistant", not "agent".
        let assistant_settings: AssistantSettingsTest = serde_json::from_value(settings).unwrap();
        assert!(assistant_settings.agent.is_none());
    }
}