1mod agent_profile;
2
3use std::sync::Arc;
4
5use ::open_ai::Model as OpenAiModel;
6use anthropic::Model as AnthropicModel;
7use anyhow::{Result, bail};
8use collections::IndexMap;
9use deepseek::Model as DeepseekModel;
10use gpui::{App, Pixels, SharedString};
11use language_model::LanguageModel;
12use lmstudio::Model as LmStudioModel;
13use mistral::Model as MistralModel;
14use ollama::Model as OllamaModel;
15use schemars::{JsonSchema, schema::Schema};
16use serde::{Deserialize, Serialize};
17use settings::{Settings, SettingsSources};
18
19pub use crate::agent_profile::*;
20
/// Registers [`AssistantSettings`] with the global settings store so the
/// values become available via `Settings::get_global`.
pub fn init(cx: &mut App) {
    AssistantSettings::register(cx);
}
24
/// Which edge of the workspace the assistant panel is docked to.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    /// Default position.
    #[default]
    Right,
    Bottom,
}
33
/// Where to show a popup notification when the agent is waiting for user input.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    /// Notify only on the primary screen (default).
    #[default]
    PrimaryScreen,
    /// Notify on every connected screen.
    AllScreens,
    /// Never show the notification.
    Never,
}
42
/// Provider-specific configuration as stored by version 1 of the assistant
/// settings. Each variant carries that provider's default model and, where
/// applicable, a custom API URL. Tagged by the `"name"` field in JSON.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
#[schemars(deny_unknown_fields)]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<String> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "mistral")]
    Mistral {
        default_model: Option<MistralModel>,
        api_url: Option<String>,
    },
}
81
/// Fully-resolved assistant settings, produced by merging all settings
/// sources in [`Settings::load`].
#[derive(Default, Clone, Debug)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    /// Model used when no feature-specific model is configured.
    pub default_model: LanguageModelSelection,
    pub inline_assistant_model: Option<LanguageModelSelection>,
    pub commit_message_model: Option<LanguageModelSelection>,
    pub thread_summary_model: Option<LanguageModelSelection>,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    /// True when any settings source still used a pre-V2 schema.
    pub using_outdated_settings_version: bool,
    pub default_profile: AgentProfileId,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    pub stream_edits: bool,
    pub single_file_review: bool,
    /// Per-provider/model parameter overrides; later entries win.
    pub model_parameters: Vec<LanguageModelParameters>,
    pub preferred_completion_mode: CompletionMode,
    pub enable_feedback: bool,
}
105
106impl AssistantSettings {
107 pub fn temperature_for_model(model: &Arc<dyn LanguageModel>, cx: &App) -> Option<f32> {
108 let settings = Self::get_global(cx);
109 settings
110 .model_parameters
111 .iter()
112 .rfind(|setting| setting.matches(model))
113 .and_then(|m| m.temperature)
114 }
115
116 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
117 self.inline_assistant_model = Some(LanguageModelSelection {
118 provider: provider.into(),
119 model,
120 });
121 }
122
123 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
124 self.commit_message_model = Some(LanguageModelSelection {
125 provider: provider.into(),
126 model,
127 });
128 }
129
130 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
131 self.thread_summary_model = Some(LanguageModelSelection {
132 provider: provider.into(),
133 model,
134 });
135 }
136}
137
/// A parameter override that applies to models matching an optional
/// provider and/or model name; `None` fields match anything.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelParameters {
    pub provider: Option<LanguageModelProviderSetting>,
    pub model: Option<SharedString>,
    pub temperature: Option<f32>,
}
144
145impl LanguageModelParameters {
146 pub fn matches(&self, model: &Arc<dyn LanguageModel>) -> bool {
147 if let Some(provider) = &self.provider {
148 if provider.0 != model.provider_id().0 {
149 return false;
150 }
151 }
152 if let Some(setting_model) = &self.model {
153 if *setting_model != model.id().0 {
154 return false;
155 }
156 }
157 true
158 }
159}
160
/// Assistant panel settings as written in the settings file.
///
/// `inner` is `None` when the user has no assistant section at all;
/// otherwise it holds either a versioned payload or the legacy
/// (pre-versioned) shape.
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}
167
/// The two on-disk shapes of the assistant settings: a versioned payload
/// (with a `"version"` field) or the original unversioned legacy shape.
/// `untagged` means serde tries each in order during deserialization.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    Versioned(Box<VersionedAssistantSettingsContent>),
    Legacy(LegacyAssistantSettingsContent),
}
174
175impl AssistantSettingsContentInner {
176 fn for_v2(content: AssistantSettingsContentV2) -> Self {
177 AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
178 content,
179 )))
180 }
181}
182
// Delegate the JSON schema entirely to the versioned representation: because
// `inner` is flattened, the file-level shape users see is the versioned one.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
196
197impl AssistantSettingsContent {
198 pub fn is_version_outdated(&self) -> bool {
199 match &self.inner {
200 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
201 VersionedAssistantSettingsContent::V1(_) => true,
202 VersionedAssistantSettingsContent::V2(_) => false,
203 },
204 Some(AssistantSettingsContentInner::Legacy(_)) => true,
205 None => false,
206 }
207 }
208
209 fn upgrade(&self) -> AssistantSettingsContentV2 {
210 match &self.inner {
211 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
212 VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
213 enabled: settings.enabled,
214 button: settings.button,
215 dock: settings.dock,
216 default_width: settings.default_width,
217 default_height: settings.default_width,
218 default_model: settings
219 .provider
220 .clone()
221 .and_then(|provider| match provider {
222 AssistantProviderContentV1::ZedDotDev { default_model } => {
223 default_model.map(|model| LanguageModelSelection {
224 provider: "zed.dev".into(),
225 model,
226 })
227 }
228 AssistantProviderContentV1::OpenAi { default_model, .. } => {
229 default_model.map(|model| LanguageModelSelection {
230 provider: "openai".into(),
231 model: model.id().to_string(),
232 })
233 }
234 AssistantProviderContentV1::Anthropic { default_model, .. } => {
235 default_model.map(|model| LanguageModelSelection {
236 provider: "anthropic".into(),
237 model: model.id().to_string(),
238 })
239 }
240 AssistantProviderContentV1::Ollama { default_model, .. } => {
241 default_model.map(|model| LanguageModelSelection {
242 provider: "ollama".into(),
243 model: model.id().to_string(),
244 })
245 }
246 AssistantProviderContentV1::LmStudio { default_model, .. } => {
247 default_model.map(|model| LanguageModelSelection {
248 provider: "lmstudio".into(),
249 model: model.id().to_string(),
250 })
251 }
252 AssistantProviderContentV1::DeepSeek { default_model, .. } => {
253 default_model.map(|model| LanguageModelSelection {
254 provider: "deepseek".into(),
255 model: model.id().to_string(),
256 })
257 }
258 AssistantProviderContentV1::Mistral { default_model, .. } => {
259 default_model.map(|model| LanguageModelSelection {
260 provider: "mistral".into(),
261 model: model.id().to_string(),
262 })
263 }
264 }),
265 inline_assistant_model: None,
266 commit_message_model: None,
267 thread_summary_model: None,
268 inline_alternatives: None,
269 default_profile: None,
270 profiles: None,
271 always_allow_tool_actions: None,
272 notify_when_agent_waiting: None,
273 stream_edits: None,
274 single_file_review: None,
275 model_parameters: Vec::new(),
276 preferred_completion_mode: None,
277 enable_feedback: None,
278 },
279 VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
280 },
281 Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
282 enabled: None,
283 button: settings.button,
284 dock: settings.dock,
285 default_width: settings.default_width,
286 default_height: settings.default_height,
287 default_model: Some(LanguageModelSelection {
288 provider: "openai".into(),
289 model: settings
290 .default_open_ai_model
291 .clone()
292 .unwrap_or_default()
293 .id()
294 .to_string(),
295 }),
296 inline_assistant_model: None,
297 commit_message_model: None,
298 thread_summary_model: None,
299 inline_alternatives: None,
300 default_profile: None,
301 profiles: None,
302 always_allow_tool_actions: None,
303 notify_when_agent_waiting: None,
304 stream_edits: None,
305 single_file_review: None,
306 model_parameters: Vec::new(),
307 preferred_completion_mode: None,
308 enable_feedback: None,
309 },
310 None => AssistantSettingsContentV2::default(),
311 }
312 }
313
314 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
315 match &mut self.inner {
316 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
317 VersionedAssistantSettingsContent::V1(ref mut settings) => {
318 settings.dock = Some(dock);
319 }
320 VersionedAssistantSettingsContent::V2(ref mut settings) => {
321 settings.dock = Some(dock);
322 }
323 },
324 Some(AssistantSettingsContentInner::Legacy(settings)) => {
325 settings.dock = Some(dock);
326 }
327 None => {
328 self.inner = Some(AssistantSettingsContentInner::for_v2(
329 AssistantSettingsContentV2 {
330 dock: Some(dock),
331 ..Default::default()
332 },
333 ))
334 }
335 }
336 }
337
    /// Records `language_model` as the default model in whichever settings
    /// shape is present.
    ///
    /// For V1 settings the provider-specific variant is rebuilt, preserving
    /// any previously configured `api_url` (and `available_models` for
    /// OpenAI). For V2 the selection is stored directly; when no settings
    /// exist yet a fresh V2 payload is created.
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    match provider.as_ref() {
                        "zed.dev" => {
                            // V1 had no way to persist a zed.dev model choice.
                            log::warn!("attempted to set zed.dev model on outdated settings");
                        }
                        "anthropic" => {
                            // Keep the user's custom API URL, if any.
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Anthropic {
                                default_model: AnthropicModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        "ollama" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Ollama {
                                default_model: Some(ollama::Model::new(
                                    &model,
                                    None,
                                    None,
                                    Some(language_model.supports_tools()),
                                )),
                                api_url,
                            });
                        }
                        "lmstudio" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::LmStudio {
                                default_model: Some(lmstudio::Model::new(&model, None, None)),
                                api_url,
                            });
                        }
                        "openai" => {
                            // OpenAI also carries a custom model list; preserve it.
                            let (api_url, available_models) = match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    available_models,
                                    ..
                                }) => (api_url.clone(), available_models.clone()),
                                _ => (None, None),
                            };
                            settings.provider = Some(AssistantProviderContentV1::OpenAi {
                                default_model: OpenAiModel::from_id(&model).ok(),
                                api_url,
                                available_models,
                            });
                        }
                        "deepseek" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                                default_model: DeepseekModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        // NOTE(review): "mistral" is not handled here even though
                        // AssistantProviderContentV1 has a Mistral variant, so setting
                        // a Mistral model on V1 settings is silently dropped — confirm
                        // whether that is intentional.
                        _ => {}
                    }
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.default_model = Some(LanguageModelSelection {
                        provider: provider.into(),
                        model,
                    });
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                // Legacy settings can only store recognized OpenAI model ids.
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        default_model: Some(LanguageModelSelection {
                            provider: provider.into(),
                            model,
                        }),
                        ..Default::default()
                    },
                ));
            }
        }
    }
445
446 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
447 self.v2_setting(|setting| {
448 setting.inline_assistant_model = Some(LanguageModelSelection {
449 provider: provider.into(),
450 model,
451 });
452 Ok(())
453 })
454 .ok();
455 }
456
457 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
458 self.v2_setting(|setting| {
459 setting.commit_message_model = Some(LanguageModelSelection {
460 provider: provider.into(),
461 model,
462 });
463 Ok(())
464 })
465 .ok();
466 }
467
468 pub fn v2_setting(
469 &mut self,
470 f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
471 ) -> anyhow::Result<()> {
472 match self.inner.get_or_insert_with(|| {
473 AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
474 ..Default::default()
475 })
476 }) {
477 AssistantSettingsContentInner::Versioned(boxed) => {
478 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
479 f(settings)
480 } else {
481 Ok(())
482 }
483 }
484 _ => Ok(()),
485 }
486 }
487
488 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
489 self.v2_setting(|setting| {
490 setting.thread_summary_model = Some(LanguageModelSelection {
491 provider: provider.into(),
492 model,
493 });
494 Ok(())
495 })
496 .ok();
497 }
498
499 pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
500 self.v2_setting(|setting| {
501 setting.always_allow_tool_actions = Some(allow);
502 Ok(())
503 })
504 .ok();
505 }
506
507 pub fn set_single_file_review(&mut self, allow: bool) {
508 self.v2_setting(|setting| {
509 setting.single_file_review = Some(allow);
510 Ok(())
511 })
512 .ok();
513 }
514
515 pub fn set_profile(&mut self, profile_id: AgentProfileId) {
516 self.v2_setting(|setting| {
517 setting.default_profile = Some(profile_id);
518 Ok(())
519 })
520 .ok();
521 }
522
523 pub fn create_profile(
524 &mut self,
525 profile_id: AgentProfileId,
526 profile: AgentProfile,
527 ) -> Result<()> {
528 self.v2_setting(|settings| {
529 let profiles = settings.profiles.get_or_insert_default();
530 if profiles.contains_key(&profile_id) {
531 bail!("profile with ID '{profile_id}' already exists");
532 }
533
534 profiles.insert(
535 profile_id,
536 AgentProfileContent {
537 name: profile.name.into(),
538 tools: profile.tools,
539 enable_all_context_servers: Some(profile.enable_all_context_servers),
540 context_servers: profile
541 .context_servers
542 .into_iter()
543 .map(|(server_id, preset)| {
544 (
545 server_id,
546 ContextServerPresetContent {
547 tools: preset.tools,
548 },
549 )
550 })
551 .collect(),
552 },
553 );
554
555 Ok(())
556 })
557 }
558}
559
/// Versioned envelope for the assistant settings, discriminated by the
/// `"version"` field in JSON.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
#[schemars(deny_unknown_fields)]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
569
570impl Default for VersionedAssistantSettingsContent {
571 fn default() -> Self {
572 Self::V2(AssistantSettingsContentV2 {
573 enabled: None,
574 button: None,
575 dock: None,
576 default_width: None,
577 default_height: None,
578 default_model: None,
579 inline_assistant_model: None,
580 commit_message_model: None,
581 thread_summary_model: None,
582 inline_alternatives: None,
583 default_profile: None,
584 profiles: None,
585 always_allow_tool_actions: None,
586 notify_when_agent_waiting: None,
587 stream_edits: None,
588 single_file_review: None,
589 model_parameters: Vec::new(),
590 preferred_completion_mode: None,
591 enable_feedback: None,
592 })
593 }
594}
595
/// Version 2 of the assistant settings file content. All fields are
/// optional so that absent keys fall through to defaults during merging.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
    /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
    ///
    /// Default: true
    single_file_review: Option<bool>,
    /// Additional parameters for language model requests. When making a request
    /// to a model, parameters will be taken from the last entry in this list
    /// that matches the model's provider and name. In each entry, both provider
    /// and model are optional, so that you can specify parameters for either
    /// one.
    ///
    /// Default: []
    #[serde(default)]
    model_parameters: Vec<LanguageModelParameters>,

    /// What completion mode to enable for new threads
    ///
    /// Default: normal
    preferred_completion_mode: Option<CompletionMode>,
    /// Whether to show thumb buttons for feedback in the agent panel.
    ///
    /// Default: true
    enable_feedback: Option<bool>,
}
671
/// Completion mode for new threads.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
    /// Standard completion behavior (default).
    #[default]
    Normal,
    /// Extended "max" completion mode.
    Max,
}
679
// Bridge to the wire-level enum used by the LLM client.
impl From<CompletionMode> for zed_llm_client::CompletionMode {
    fn from(value: CompletionMode) -> Self {
        match value {
            CompletionMode::Normal => zed_llm_client::CompletionMode::Normal,
            CompletionMode::Max => zed_llm_client::CompletionMode::Max,
        }
    }
}
688
/// A (provider, model) pair identifying a concrete language model.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    pub provider: LanguageModelProviderSetting,
    pub model: String,
}
694
/// Newtype over a provider id string (e.g. "openai", "anthropic").
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelProviderSetting(pub String);
697
698impl JsonSchema for LanguageModelProviderSetting {
699 fn schema_name() -> String {
700 "LanguageModelProviderSetting".into()
701 }
702
703 fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
704 schemars::schema::SchemaObject {
705 enum_values: Some(vec![
706 "anthropic".into(),
707 "amazon-bedrock".into(),
708 "google".into(),
709 "lmstudio".into(),
710 "ollama".into(),
711 "openai".into(),
712 "zed.dev".into(),
713 "copilot_chat".into(),
714 "deepseek".into(),
715 "mistral".into(),
716 ]),
717 ..Default::default()
718 }
719 .into()
720 }
721}
722
// Convenience conversion so call sites can pass owned provider strings.
impl From<String> for LanguageModelProviderSetting {
    fn from(provider: String) -> Self {
        Self(provider)
    }
}
728
// Convenience conversion for string literals like `"openai".into()`.
impl From<&str> for LanguageModelProviderSetting {
    fn from(provider: &str) -> Self {
        Self(provider.to_string())
    }
}
734
735impl Default for LanguageModelSelection {
736 fn default() -> Self {
737 Self {
738 provider: LanguageModelProviderSetting("openai".to_string()),
739 model: "gpt-4".to_string(),
740 }
741 }
742}
743
/// On-disk representation of an agent profile.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    // Per-tool enable/disable flags; missing key means empty map.
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    // Per-context-server tool presets; missing key means empty map.
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
754
/// On-disk tool enablement for a single context server within a profile.
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}
759
/// Version 1 of the assistant settings file content.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek",
    /// "mistral", "zed.dev", each with their respective default models and
    /// configurations.
    provider: Option<AssistantProviderContentV1>,
}
789
/// The original, unversioned assistant settings shape (pre-V1), which only
/// supported OpenAI.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}
818
impl Settings for AssistantSettings {
    // Read from the "agent" key, falling back to the older "assistant" key.
    const KEY: Option<&'static str> = Some("agent");

    const FALLBACK_KEY: Option<&'static str> = Some("assistant");

    // Keep the "version" discriminant when rewriting the settings file.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Merges all settings sources (defaults first, then customizations)
    /// into a resolved [`AssistantSettings`]. Later sources override
    /// earlier ones; each source is upgraded to V2 before merging.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Remember if ANY source still used a pre-V2 schema so the UI
            // can prompt the user to migrate.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            // Optional per-feature models: a later source's Some overrides,
            // otherwise keep whatever was accumulated so far.
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.single_file_review, value.single_file_review);
            merge(&mut settings.default_profile, value.default_profile);
            merge(
                &mut settings.preferred_completion_mode,
                value.preferred_completion_mode,
            );
            merge(&mut settings.enable_feedback, value.enable_feedback);

            // Model parameters accumulate across sources; matching is
            // last-entry-wins (see temperature_for_model).
            settings
                .model_parameters
                .extend_from_slice(&value.model_parameters);

            // Profiles merge by id, later sources replacing earlier ones.
            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    /// Maps VS Code's `chat.agent.enabled` onto both `enabled` and `button`
    /// in whichever settings shape is present (legacy has no `enabled`).
    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}
949
/// Overwrites `target` with `value` when one is provided; a `None` leaves
/// the previously accumulated value in place.
fn merge<T>(target: &mut T, value: Option<T>) {
    match value {
        Some(value) => *target = value,
        None => {}
    }
}
955
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};
    use settings::SettingsStore;

    use super::*;

    /// Writing V2 settings through the settings store should persist the
    /// `"version": "2"` discriminant and round-trip as not-outdated.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // Built-in defaults: current schema, zed.dev default model.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                default_profile: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                                single_file_review: None,
                                enable_feedback: None,
                                model_parameters: Vec::new(),
                                preferred_completion_mode: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        // The serialized file must carry the version tag...
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            agent: AssistantSettingsContent,
        }

        // ...and deserialize back as the current (non-outdated) version.
        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.agent.is_version_outdated());
    }

    /// Settings written under the old "assistant" key should load via
    /// FALLBACK_KEY, and subsequent writes should not create an "agent" key.
    #[gpui::test]
    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let mut test_settings = settings::SettingsStore::test(cx);
            let user_settings_content = r#"{
                "assistant": {
                    "enabled": true,
                    "version": "2",
                    "default_model": {
                        "provider": "zed.dev",
                        "model": "gpt-99"
                    },
                }}"#;
            test_settings
                .set_user_settings(user_settings_content, cx)
                .unwrap();
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.run_until_parked();

        let assistant_settings = cx.update(|cx| AssistantSettings::get_global(cx).clone());
        assert!(assistant_settings.enabled);
        assert!(!assistant_settings.using_outdated_settings_version);
        assert_eq!(assistant_settings.default_model.model, "gpt-99");

        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
            settings_store.update_user_settings::<AssistantSettings>(cx, |settings| {
                *settings = AssistantSettingsContent {
                    inner: Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(false),
                            default_model: Some(LanguageModelSelection {
                                provider: "xai".to_owned().into(),
                                model: "grok".to_owned(),
                            }),
                            ..Default::default()
                        },
                    )),
                };
            });
        });

        cx.run_until_parked();

        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
            agent: Option<serde_json_lenient::Value>,
        }

        // The update must have gone to the existing "assistant" key; no
        // new "agent" key should appear.
        let assistant_settings: AssistantSettingsTest = serde_json::from_value(settings).unwrap();
        assert!(assistant_settings.agent.is_none());
    }
}