1mod agent_profile;
2
3use std::sync::Arc;
4
5use ::open_ai::Model as OpenAiModel;
6use anthropic::Model as AnthropicModel;
7use anyhow::{Result, bail};
8use collections::IndexMap;
9use deepseek::Model as DeepseekModel;
10use gpui::{App, Pixels, SharedString};
11use language_model::LanguageModel;
12use lmstudio::Model as LmStudioModel;
13use mistral::Model as MistralModel;
14use ollama::Model as OllamaModel;
15use schemars::{JsonSchema, schema::Schema};
16use serde::{Deserialize, Serialize};
17use settings::{Settings, SettingsSources};
18
19pub use crate::agent_profile::*;
20
/// Registers [`AssistantSettings`] with the global settings store.
///
/// Must be called once during application startup before any code reads
/// these settings via `Settings::get_global`.
pub fn init(cx: &mut App) {
    AssistantSettings::register(cx);
}
24
/// Where the assistant panel is docked in the workspace.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    /// Dock the panel on the left side of the workspace.
    Left,
    /// Dock the panel on the right side of the workspace (the default).
    #[default]
    Right,
    /// Dock the panel at the bottom of the workspace.
    Bottom,
}
33
/// Controls where a popup notification is shown when the agent is waiting
/// for user input.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    /// Notify only on the primary screen (the default).
    #[default]
    PrimaryScreen,
    /// Notify on every connected screen.
    AllScreens,
    /// Never show a notification.
    Never,
}
42
/// Provider configuration as stored by V1 settings files.
///
/// Serialized with an internal `"name"` tag, so each variant corresponds to a
/// `{"name": "<provider>", ...}` object. Only used when reading/upgrading old
/// settings; V2 settings store a [`LanguageModelSelection`] instead.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
#[schemars(deny_unknown_fields)]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<String> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "mistral")]
    Mistral {
        default_model: Option<MistralModel>,
        api_url: Option<String>,
    },
}
81
/// Resolved, in-memory assistant settings, produced by merging all settings
/// sources in [`Settings::load`]. Field semantics mirror
/// [`AssistantSettingsContentV2`].
#[derive(Default, Clone, Debug)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    // The following three are `None` when the feature should fall back to
    // `default_model`.
    pub inline_assistant_model: Option<LanguageModelSelection>,
    pub commit_message_model: Option<LanguageModelSelection>,
    pub thread_summary_model: Option<LanguageModelSelection>,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    // Set to true during load when any settings source is still in a pre-V2
    // format, so the UI can surface a migration prompt.
    pub using_outdated_settings_version: bool,
    pub default_profile: AgentProfileId,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    pub stream_edits: bool,
    pub single_file_review: bool,
    // Entries are consulted back-to-front; the last matching entry wins (see
    // `temperature_for_model`).
    pub model_parameters: Vec<LanguageModelParameters>,
    pub preferred_completion_mode: CompletionMode,
    pub enable_feedback: bool,
}
105
106impl AssistantSettings {
107 pub fn temperature_for_model(model: &Arc<dyn LanguageModel>, cx: &App) -> Option<f32> {
108 let settings = Self::get_global(cx);
109 settings
110 .model_parameters
111 .iter()
112 .rfind(|setting| setting.matches(model))
113 .and_then(|m| m.temperature)
114 }
115
116 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
117 self.inline_assistant_model = Some(LanguageModelSelection {
118 provider: provider.into(),
119 model,
120 });
121 }
122
123 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
124 self.commit_message_model = Some(LanguageModelSelection {
125 provider: provider.into(),
126 model,
127 });
128 }
129
130 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
131 self.thread_summary_model = Some(LanguageModelSelection {
132 provider: provider.into(),
133 model,
134 });
135 }
136}
137
/// A user-specified set of request parameters scoped to a provider and/or
/// model. Unset `provider`/`model` act as wildcards.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelParameters {
    pub provider: Option<LanguageModelProviderSetting>,
    pub model: Option<SharedString>,
    pub temperature: Option<f32>,
}
144
145impl LanguageModelParameters {
146 pub fn matches(&self, model: &Arc<dyn LanguageModel>) -> bool {
147 if let Some(provider) = &self.provider {
148 if provider.0 != model.provider_id().0 {
149 return false;
150 }
151 }
152 if let Some(setting_model) = &self.model {
153 if *setting_model != model.id().0 {
154 return false;
155 }
156 }
157 true
158 }
159}
160
/// Assistant panel settings
///
/// The on-disk representation of the assistant settings. `inner` is flattened
/// so the JSON object deserializes directly into one of the versioned or
/// legacy layouts; an empty object yields `inner: None`.
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}
167
/// The possible on-disk layouts: an explicitly versioned document (V1/V2) or
/// the pre-versioning legacy layout. `untagged`, so serde picks whichever
/// variant matches the JSON shape (versioned documents carry a `"version"`
/// field).
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    Versioned(Box<VersionedAssistantSettingsContent>),
    Legacy(LegacyAssistantSettingsContent),
}
174
175impl AssistantSettingsContentInner {
176 fn for_v2(content: AssistantSettingsContentV2) -> Self {
177 AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
178 content,
179 )))
180 }
181}
182
// `AssistantSettingsContent` is a transparent wrapper (serde-flattened) around
// the versioned content, so its JSON schema simply delegates to
// `VersionedAssistantSettingsContent`.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
196
197impl AssistantSettingsContent {
198 pub fn is_version_outdated(&self) -> bool {
199 match &self.inner {
200 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
201 VersionedAssistantSettingsContent::V1(_) => true,
202 VersionedAssistantSettingsContent::V2(_) => false,
203 },
204 Some(AssistantSettingsContentInner::Legacy(_)) => true,
205 None => false,
206 }
207 }
208
209 fn upgrade(&self) -> AssistantSettingsContentV2 {
210 match &self.inner {
211 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
212 VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
213 enabled: settings.enabled,
214 button: settings.button,
215 dock: settings.dock,
216 default_width: settings.default_width,
217 default_height: settings.default_width,
218 default_model: settings
219 .provider
220 .clone()
221 .and_then(|provider| match provider {
222 AssistantProviderContentV1::ZedDotDev { default_model } => {
223 default_model.map(|model| LanguageModelSelection {
224 provider: "zed.dev".into(),
225 model,
226 })
227 }
228 AssistantProviderContentV1::OpenAi { default_model, .. } => {
229 default_model.map(|model| LanguageModelSelection {
230 provider: "openai".into(),
231 model: model.id().to_string(),
232 })
233 }
234 AssistantProviderContentV1::Anthropic { default_model, .. } => {
235 default_model.map(|model| LanguageModelSelection {
236 provider: "anthropic".into(),
237 model: model.id().to_string(),
238 })
239 }
240 AssistantProviderContentV1::Ollama { default_model, .. } => {
241 default_model.map(|model| LanguageModelSelection {
242 provider: "ollama".into(),
243 model: model.id().to_string(),
244 })
245 }
246 AssistantProviderContentV1::LmStudio { default_model, .. } => {
247 default_model.map(|model| LanguageModelSelection {
248 provider: "lmstudio".into(),
249 model: model.id().to_string(),
250 })
251 }
252 AssistantProviderContentV1::DeepSeek { default_model, .. } => {
253 default_model.map(|model| LanguageModelSelection {
254 provider: "deepseek".into(),
255 model: model.id().to_string(),
256 })
257 }
258 AssistantProviderContentV1::Mistral { default_model, .. } => {
259 default_model.map(|model| LanguageModelSelection {
260 provider: "mistral".into(),
261 model: model.id().to_string(),
262 })
263 }
264 }),
265 inline_assistant_model: None,
266 commit_message_model: None,
267 thread_summary_model: None,
268 inline_alternatives: None,
269 default_profile: None,
270 profiles: None,
271 always_allow_tool_actions: None,
272 notify_when_agent_waiting: None,
273 stream_edits: None,
274 single_file_review: None,
275 model_parameters: Vec::new(),
276 preferred_completion_mode: None,
277 enable_feedback: None,
278 },
279 VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
280 },
281 Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
282 enabled: None,
283 button: settings.button,
284 dock: settings.dock,
285 default_width: settings.default_width,
286 default_height: settings.default_height,
287 default_model: Some(LanguageModelSelection {
288 provider: "openai".into(),
289 model: settings
290 .default_open_ai_model
291 .clone()
292 .unwrap_or_default()
293 .id()
294 .to_string(),
295 }),
296 inline_assistant_model: None,
297 commit_message_model: None,
298 thread_summary_model: None,
299 inline_alternatives: None,
300 default_profile: None,
301 profiles: None,
302 always_allow_tool_actions: None,
303 notify_when_agent_waiting: None,
304 stream_edits: None,
305 single_file_review: None,
306 model_parameters: Vec::new(),
307 preferred_completion_mode: None,
308 enable_feedback: None,
309 },
310 None => AssistantSettingsContentV2::default(),
311 }
312 }
313
314 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
315 match &mut self.inner {
316 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
317 VersionedAssistantSettingsContent::V1(ref mut settings) => {
318 settings.dock = Some(dock);
319 }
320 VersionedAssistantSettingsContent::V2(ref mut settings) => {
321 settings.dock = Some(dock);
322 }
323 },
324 Some(AssistantSettingsContentInner::Legacy(settings)) => {
325 settings.dock = Some(dock);
326 }
327 None => {
328 self.inner = Some(AssistantSettingsContentInner::for_v2(
329 AssistantSettingsContentV2 {
330 dock: Some(dock),
331 ..Default::default()
332 },
333 ))
334 }
335 }
336 }
337
    /// Records `language_model` as the default model in whichever settings
    /// layout is currently stored.
    ///
    /// For V1 settings the model is written into the matching
    /// provider-specific variant, preserving any previously configured
    /// `api_url` (and, for OpenAI, `available_models`); providers V1 cannot
    /// represent fall through the catch-all arm and are ignored (mistral) or
    /// logged (zed.dev). V2 settings store the provider/model pair directly.
    /// Legacy settings only accept known OpenAI model ids. When no settings
    /// exist, V2 settings are created.
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    match provider.as_ref() {
                        "zed.dev" => {
                            // V1 stored zed.dev models as plain strings with no
                            // way to round-trip this model; warn instead.
                            log::warn!("attempted to set zed.dev model on outdated settings");
                        }
                        "anthropic" => {
                            // Keep the previously configured endpoint, if any.
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Anthropic {
                                default_model: AnthropicModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        "ollama" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Ollama {
                                default_model: Some(ollama::Model::new(
                                    &model,
                                    None,
                                    None,
                                    Some(language_model.supports_tools()),
                                )),
                                api_url,
                            });
                        }
                        "lmstudio" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::LmStudio {
                                default_model: Some(lmstudio::Model::new(
                                    &model, None, None, false,
                                )),
                                api_url,
                            });
                        }
                        "openai" => {
                            // OpenAI additionally carries a user-defined model
                            // list that must survive the update.
                            let (api_url, available_models) = match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    available_models,
                                    ..
                                }) => (api_url.clone(), available_models.clone()),
                                _ => (None, None),
                            };
                            settings.provider = Some(AssistantProviderContentV1::OpenAi {
                                default_model: OpenAiModel::from_id(&model).ok(),
                                api_url,
                                available_models,
                            });
                        }
                        "deepseek" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                                default_model: DeepseekModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        _ => {}
                    }
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.default_model = Some(LanguageModelSelection {
                        provider: provider.into(),
                        model,
                    });
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                // Legacy settings can only store a known OpenAI model; other
                // models are silently dropped.
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        default_model: Some(LanguageModelSelection {
                            provider: provider.into(),
                            model,
                        }),
                        ..Default::default()
                    },
                ));
            }
        }
    }
447
448 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
449 self.v2_setting(|setting| {
450 setting.inline_assistant_model = Some(LanguageModelSelection {
451 provider: provider.into(),
452 model,
453 });
454 Ok(())
455 })
456 .ok();
457 }
458
459 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
460 self.v2_setting(|setting| {
461 setting.commit_message_model = Some(LanguageModelSelection {
462 provider: provider.into(),
463 model,
464 });
465 Ok(())
466 })
467 .ok();
468 }
469
470 pub fn v2_setting(
471 &mut self,
472 f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
473 ) -> anyhow::Result<()> {
474 match self.inner.get_or_insert_with(|| {
475 AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
476 ..Default::default()
477 })
478 }) {
479 AssistantSettingsContentInner::Versioned(boxed) => {
480 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
481 f(settings)
482 } else {
483 Ok(())
484 }
485 }
486 _ => Ok(()),
487 }
488 }
489
490 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
491 self.v2_setting(|setting| {
492 setting.thread_summary_model = Some(LanguageModelSelection {
493 provider: provider.into(),
494 model,
495 });
496 Ok(())
497 })
498 .ok();
499 }
500
501 pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
502 self.v2_setting(|setting| {
503 setting.always_allow_tool_actions = Some(allow);
504 Ok(())
505 })
506 .ok();
507 }
508
509 pub fn set_single_file_review(&mut self, allow: bool) {
510 self.v2_setting(|setting| {
511 setting.single_file_review = Some(allow);
512 Ok(())
513 })
514 .ok();
515 }
516
517 pub fn set_profile(&mut self, profile_id: AgentProfileId) {
518 self.v2_setting(|setting| {
519 setting.default_profile = Some(profile_id);
520 Ok(())
521 })
522 .ok();
523 }
524
    /// Adds a new agent profile to the V2 settings.
    ///
    /// # Errors
    ///
    /// Returns an error if a profile with `profile_id` already exists.
    ///
    /// NOTE(review): when the stored settings are V1 or legacy, `v2_setting`
    /// skips the closure, so this returns `Ok(())` without creating anything.
    pub fn create_profile(
        &mut self,
        profile_id: AgentProfileId,
        profile: AgentProfile,
    ) -> Result<()> {
        self.v2_setting(|settings| {
            // Lazily create the profile map on first use.
            let profiles = settings.profiles.get_or_insert_default();
            if profiles.contains_key(&profile_id) {
                bail!("profile with ID '{profile_id}' already exists");
            }

            // Convert the runtime profile into its serialized form.
            profiles.insert(
                profile_id,
                AgentProfileContent {
                    name: profile.name.into(),
                    tools: profile.tools,
                    enable_all_context_servers: Some(profile.enable_all_context_servers),
                    context_servers: profile
                        .context_servers
                        .into_iter()
                        .map(|(server_id, preset)| {
                            (
                                server_id,
                                ContextServerPresetContent {
                                    tools: preset.tools,
                                },
                            )
                        })
                        .collect(),
                },
            );

            Ok(())
        })
    }
560}
561
/// The explicitly versioned on-disk settings layouts, discriminated by the
/// JSON `"version"` field ("1" or "2").
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
#[schemars(deny_unknown_fields)]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
571
572impl Default for VersionedAssistantSettingsContent {
573 fn default() -> Self {
574 Self::V2(AssistantSettingsContentV2 {
575 enabled: None,
576 button: None,
577 dock: None,
578 default_width: None,
579 default_height: None,
580 default_model: None,
581 inline_assistant_model: None,
582 commit_message_model: None,
583 thread_summary_model: None,
584 inline_alternatives: None,
585 default_profile: None,
586 profiles: None,
587 always_allow_tool_actions: None,
588 notify_when_agent_waiting: None,
589 stream_edits: None,
590 single_file_review: None,
591 model_parameters: Vec::new(),
592 preferred_completion_mode: None,
593 enable_feedback: None,
594 })
595 }
596}
597
/// Version 2 of the on-disk assistant settings: the current layout that all
/// older layouts are upgraded to (see `AssistantSettingsContent::upgrade`).
/// Every field is optional so user files can override defaults selectively.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
    /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
    ///
    /// Default: true
    single_file_review: Option<bool>,
    /// Additional parameters for language model requests. When making a request
    /// to a model, parameters will be taken from the last entry in this list
    /// that matches the model's provider and name. In each entry, both provider
    /// and model are optional, so that you can specify parameters for either
    /// one.
    ///
    /// Default: []
    #[serde(default)]
    model_parameters: Vec<LanguageModelParameters>,

    /// What completion mode to enable for new threads
    ///
    /// Default: normal
    preferred_completion_mode: Option<CompletionMode>,
    /// Whether to show thumb buttons for feedback in the agent panel.
    ///
    /// Default: true
    enable_feedback: Option<bool>,
}
673
/// Completion mode applied to new agent threads.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
    /// Standard completion behavior (the default).
    #[default]
    Normal,
    /// "Max" mode; maps to `zed_llm_client::CompletionMode::Max`.
    Max,
}
681
682impl From<CompletionMode> for zed_llm_client::CompletionMode {
683 fn from(value: CompletionMode) -> Self {
684 match value {
685 CompletionMode::Normal => zed_llm_client::CompletionMode::Normal,
686 CompletionMode::Max => zed_llm_client::CompletionMode::Max,
687 }
688 }
689}
690
/// A (provider id, model id) pair identifying a concrete language model.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    pub provider: LanguageModelProviderSetting,
    pub model: String,
}
696
/// Newtype around a language model provider id (e.g. "openai", "anthropic").
/// Accepts any string at runtime; the JSON schema enumerates the known ids.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelProviderSetting(pub String);
699
700impl JsonSchema for LanguageModelProviderSetting {
701 fn schema_name() -> String {
702 "LanguageModelProviderSetting".into()
703 }
704
705 fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
706 schemars::schema::SchemaObject {
707 enum_values: Some(vec![
708 "anthropic".into(),
709 "amazon-bedrock".into(),
710 "google".into(),
711 "lmstudio".into(),
712 "ollama".into(),
713 "openai".into(),
714 "zed.dev".into(),
715 "copilot_chat".into(),
716 "deepseek".into(),
717 "mistral".into(),
718 ]),
719 ..Default::default()
720 }
721 .into()
722 }
723}
724
// Convenience conversion so call sites can write `"openai".to_string().into()`.
impl From<String> for LanguageModelProviderSetting {
    fn from(provider: String) -> Self {
        Self(provider)
    }
}
730
// Convenience conversion so call sites can write `"openai".into()`.
impl From<&str> for LanguageModelProviderSetting {
    fn from(provider: &str) -> Self {
        Self(provider.to_string())
    }
}
736
737impl Default for LanguageModelSelection {
738 fn default() -> Self {
739 Self {
740 provider: LanguageModelProviderSetting("openai".to_string()),
741 model: "gpt-4".to_string(),
742 }
743 }
744}
745
/// Serialized form of an agent profile as stored in the settings file.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    /// Human-readable profile name.
    pub name: Arc<str>,
    /// Per-tool enabled/disabled flags.
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    /// Per-context-server tool presets, keyed by server id.
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
756
/// Serialized per-context-server tool preset: which tools are enabled.
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}
761
/// Version 1 of the on-disk assistant settings. Kept only so old files still
/// deserialize; upgraded to V2 in `AssistantSettingsContent::upgrade`.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek",
    /// "mistral", "zed.dev", each with their respective default models and
    /// configurations.
    provider: Option<AssistantProviderContentV1>,
}
791
/// The pre-versioning settings layout (OpenAI-only). Kept so old files still
/// deserialize; upgraded to V2 in `AssistantSettingsContent::upgrade`.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}
820
impl Settings for AssistantSettings {
    /// Settings live under the "agent" key, falling back to the legacy
    /// "assistant" key when "agent" is absent.
    const KEY: Option<&'static str> = Some("agent");

    const FALLBACK_KEY: Option<&'static str> = Some("assistant");

    /// Preserve the "version" field when the settings file is rewritten so
    /// the versioned deserialization keeps working.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Merges defaults and user customizations into the resolved settings.
    ///
    /// Each source is first upgraded to the V2 shape; later sources override
    /// earlier ones field by field. `model_parameters` from all sources are
    /// concatenated (preserving order so "last match wins" holds across
    /// sources), and `profiles` are merged by profile id.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Flag pre-V2 sources so the UI can surface a migration notice.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            // Optional model overrides: a later source wins, otherwise keep
            // whatever an earlier source set.
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.single_file_review, value.single_file_review);
            merge(&mut settings.default_profile, value.default_profile);
            merge(
                &mut settings.preferred_completion_mode,
                value.preferred_completion_mode,
            );
            merge(&mut settings.enable_feedback, value.enable_feedback);

            settings
                .model_parameters
                .extend_from_slice(&value.model_parameters);

            // Merge profiles by id, converting the serialized form into the
            // runtime `AgentProfile`.
            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    /// Maps VS Code's `chat.agent.enabled` onto the enabled/button flags of
    /// whichever settings layout is stored (legacy settings have no
    /// `enabled` field, so only `button` is set there).
    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}
951
/// Overwrites `target` with `value` when a value is present; leaves `target`
/// untouched otherwise.
fn merge<T>(target: &mut T, value: Option<T>) {
    match value {
        Some(value) => *target = value,
        None => {}
    }
}
957
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};
    use settings::SettingsStore;

    use super::*;

    /// Writing settings via the settings store should persist a V2 document
    /// (with `"version": "2"`) that round-trips and is not flagged outdated.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // With no user settings, defaults apply.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                default_profile: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                                single_file_review: None,
                                enable_feedback: None,
                                model_parameters: Vec::new(),
                                preferred_completion_mode: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        // The serialized file must carry the preserved version marker.
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            agent: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.agent.is_version_outdated());
    }

    /// Settings written under the legacy "assistant" key should still load
    /// (via FALLBACK_KEY), and subsequent updates should not introduce an
    /// "agent" key alongside it.
    #[gpui::test]
    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let mut test_settings = settings::SettingsStore::test(cx);
            let user_settings_content = r#"{
                "assistant": {
                    "enabled": true,
                    "version": "2",
                    "default_model": {
                        "provider": "zed.dev",
                        "model": "gpt-99"
                    },
                }}"#;
            test_settings
                .set_user_settings(user_settings_content, cx)
                .unwrap();
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.run_until_parked();

        let assistant_settings = cx.update(|cx| AssistantSettings::get_global(cx).clone());
        assert!(assistant_settings.enabled);
        assert!(!assistant_settings.using_outdated_settings_version);
        assert_eq!(assistant_settings.default_model.model, "gpt-99");

        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
            settings_store.update_user_settings::<AssistantSettings>(cx, |settings| {
                *settings = AssistantSettingsContent {
                    inner: Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(false),
                            default_model: Some(LanguageModelSelection {
                                provider: "xai".to_owned().into(),
                                model: "grok".to_owned(),
                            }),
                            ..Default::default()
                        },
                    )),
                };
            });
        });

        cx.run_until_parked();

        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
            agent: Option<serde_json_lenient::Value>,
        }

        // The update must have been written back under "assistant", not "agent".
        let assistant_settings: AssistantSettingsTest = serde_json::from_value(settings).unwrap();
        assert!(assistant_settings.agent.is_none());
    }
}