1mod agent_profile;
2
3use std::sync::Arc;
4
5use ::open_ai::Model as OpenAiModel;
6use anthropic::Model as AnthropicModel;
7use anyhow::{Result, bail};
8use collections::IndexMap;
9use deepseek::Model as DeepseekModel;
10use feature_flags::{AgentStreamEditsFeatureFlag, Assistant2FeatureFlag, FeatureFlagAppExt};
11use gpui::{App, Pixels, SharedString};
12use language_model::{CloudModel, LanguageModel};
13use lmstudio::Model as LmStudioModel;
14use ollama::Model as OllamaModel;
15use schemars::{JsonSchema, schema::Schema};
16use serde::{Deserialize, Serialize};
17use settings::{Settings, SettingsSources};
18
19pub use crate::agent_profile::*;
20
/// Registers `AssistantSettings` with the global settings store.
pub fn init(cx: &mut App) {
    AssistantSettings::register(cx);
}
24
/// Which edge of the workspace the assistant panel is docked to.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}
33
/// Where to surface a popup notification when the agent is waiting for user input.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    /// Notify on the primary screen only (the default).
    #[default]
    PrimaryScreen,
    /// Notify on all screens.
    AllScreens,
    /// Never show the notification.
    Never,
}
42
/// Provider configuration as stored by the legacy V1 settings schema.
///
/// Serialized internally tagged by `"name"`, e.g. `{ "name": "openai", ... }`.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}
75
/// Resolved assistant settings after merging defaults and user customizations.
///
/// This is the in-memory representation registered as a global; the on-disk
/// schema is `AssistantSettingsContent`.
#[derive(Default, Clone, Debug)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub inline_assistant_model: Option<LanguageModelSelection>,
    pub commit_message_model: Option<LanguageModelSelection>,
    pub thread_summary_model: Option<LanguageModelSelection>,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    // Set during `load` when any settings source still uses a pre-V2 schema.
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
    pub default_profile: AgentProfileId,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    pub stream_edits: bool,
    pub single_file_review: bool,
    pub model_parameters: Vec<LanguageModelParameters>,
    pub preferred_completion_mode: CompletionMode,
}
99
100impl AssistantSettings {
101 pub fn temperature_for_model(model: &Arc<dyn LanguageModel>, cx: &App) -> Option<f32> {
102 let settings = Self::get_global(cx);
103 settings
104 .model_parameters
105 .iter()
106 .rfind(|setting| setting.matches(model))
107 .and_then(|m| m.temperature)
108 }
109
110 pub fn stream_edits(&self, cx: &App) -> bool {
111 cx.has_flag::<AgentStreamEditsFeatureFlag>() || self.stream_edits
112 }
113
114 pub fn are_live_diffs_enabled(&self, cx: &App) -> bool {
115 if cx.has_flag::<Assistant2FeatureFlag>() {
116 return false;
117 }
118
119 cx.is_staff() || self.enable_experimental_live_diffs
120 }
121
122 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
123 self.inline_assistant_model = Some(LanguageModelSelection {
124 provider: provider.into(),
125 model,
126 });
127 }
128
129 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
130 self.commit_message_model = Some(LanguageModelSelection {
131 provider: provider.into(),
132 model,
133 });
134 }
135
136 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
137 self.thread_summary_model = Some(LanguageModelSelection {
138 provider: provider.into(),
139 model,
140 });
141 }
142}
143
/// A per-model parameter override entry from the `model_parameters` setting.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelParameters {
    /// Provider this entry applies to; `None` matches any provider.
    pub provider: Option<LanguageModelProviderSetting>,
    /// Model id this entry applies to; `None` matches any model.
    pub model: Option<SharedString>,
    pub temperature: Option<f32>,
}
150
151impl LanguageModelParameters {
152 pub fn matches(&self, model: &Arc<dyn LanguageModel>) -> bool {
153 if let Some(provider) = &self.provider {
154 if provider.0 != model.provider_id().0 {
155 return false;
156 }
157 }
158 if let Some(setting_model) = &self.model {
159 if *setting_model != model.id().0 {
160 return false;
161 }
162 }
163 true
164 }
165}
166
/// Assistant panel settings
///
/// On-disk representation of the settings; `inner` is `None` when the user
/// has no assistant settings at all.
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}
173
/// Either a versioned settings payload or the legacy (unversioned) schema.
///
/// `untagged`: deserialization tries each variant in declaration order.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    Versioned(Box<VersionedAssistantSettingsContent>),
    Legacy(LegacyAssistantSettingsContent),
}
180
181impl AssistantSettingsContentInner {
182 fn for_v2(content: AssistantSettingsContentV2) -> Self {
183 AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
184 content,
185 )))
186 }
187}
188
// The JSON schema is delegated to the versioned representation so that
// generated settings-file completions always reflect the versioned schema.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
202
203impl AssistantSettingsContent {
204 pub fn is_version_outdated(&self) -> bool {
205 match &self.inner {
206 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
207 VersionedAssistantSettingsContent::V1(_) => true,
208 VersionedAssistantSettingsContent::V2(_) => false,
209 },
210 Some(AssistantSettingsContentInner::Legacy(_)) => true,
211 None => false,
212 }
213 }
214
215 fn upgrade(&self) -> AssistantSettingsContentV2 {
216 match &self.inner {
217 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
218 VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
219 enabled: settings.enabled,
220 button: settings.button,
221 dock: settings.dock,
222 default_width: settings.default_width,
223 default_height: settings.default_width,
224 default_model: settings
225 .provider
226 .clone()
227 .and_then(|provider| match provider {
228 AssistantProviderContentV1::ZedDotDev { default_model } => {
229 default_model.map(|model| LanguageModelSelection {
230 provider: "zed.dev".into(),
231 model: model.id().to_string(),
232 })
233 }
234 AssistantProviderContentV1::OpenAi { default_model, .. } => {
235 default_model.map(|model| LanguageModelSelection {
236 provider: "openai".into(),
237 model: model.id().to_string(),
238 })
239 }
240 AssistantProviderContentV1::Anthropic { default_model, .. } => {
241 default_model.map(|model| LanguageModelSelection {
242 provider: "anthropic".into(),
243 model: model.id().to_string(),
244 })
245 }
246 AssistantProviderContentV1::Ollama { default_model, .. } => {
247 default_model.map(|model| LanguageModelSelection {
248 provider: "ollama".into(),
249 model: model.id().to_string(),
250 })
251 }
252 AssistantProviderContentV1::LmStudio { default_model, .. } => {
253 default_model.map(|model| LanguageModelSelection {
254 provider: "lmstudio".into(),
255 model: model.id().to_string(),
256 })
257 }
258 AssistantProviderContentV1::DeepSeek { default_model, .. } => {
259 default_model.map(|model| LanguageModelSelection {
260 provider: "deepseek".into(),
261 model: model.id().to_string(),
262 })
263 }
264 }),
265 inline_assistant_model: None,
266 commit_message_model: None,
267 thread_summary_model: None,
268 inline_alternatives: None,
269 enable_experimental_live_diffs: None,
270 default_profile: None,
271 profiles: None,
272 always_allow_tool_actions: None,
273 notify_when_agent_waiting: None,
274 stream_edits: None,
275 single_file_review: None,
276 model_parameters: Vec::new(),
277 preferred_completion_mode: None,
278 },
279 VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
280 },
281 Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
282 enabled: None,
283 button: settings.button,
284 dock: settings.dock,
285 default_width: settings.default_width,
286 default_height: settings.default_height,
287 default_model: Some(LanguageModelSelection {
288 provider: "openai".into(),
289 model: settings
290 .default_open_ai_model
291 .clone()
292 .unwrap_or_default()
293 .id()
294 .to_string(),
295 }),
296 inline_assistant_model: None,
297 commit_message_model: None,
298 thread_summary_model: None,
299 inline_alternatives: None,
300 enable_experimental_live_diffs: None,
301 default_profile: None,
302 profiles: None,
303 always_allow_tool_actions: None,
304 notify_when_agent_waiting: None,
305 stream_edits: None,
306 single_file_review: None,
307 model_parameters: Vec::new(),
308 preferred_completion_mode: None,
309 },
310 None => AssistantSettingsContentV2::default(),
311 }
312 }
313
314 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
315 match &mut self.inner {
316 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
317 VersionedAssistantSettingsContent::V1(ref mut settings) => {
318 settings.dock = Some(dock);
319 }
320 VersionedAssistantSettingsContent::V2(ref mut settings) => {
321 settings.dock = Some(dock);
322 }
323 },
324 Some(AssistantSettingsContentInner::Legacy(settings)) => {
325 settings.dock = Some(dock);
326 }
327 None => {
328 self.inner = Some(AssistantSettingsContentInner::for_v2(
329 AssistantSettingsContentV2 {
330 dock: Some(dock),
331 ..Default::default()
332 },
333 ))
334 }
335 }
336 }
337
    /// Persists `language_model` as the default model in whatever settings
    /// schema is currently stored.
    ///
    /// For V1 settings, the provider id is mapped onto the matching
    /// `AssistantProviderContentV1` variant, preserving any previously
    /// configured `api_url` (and, for OpenAI, `available_models`). For V2
    /// (and when no settings exist yet), a plain (provider, model) selection
    /// is written instead.
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    match provider.as_ref() {
                        "zed.dev" => {
                            // The V1 schema has no way to express a zed.dev
                            // model id, so this is dropped with a warning.
                            log::warn!("attempted to set zed.dev model on outdated settings");
                        }
                        "anthropic" => {
                            // Keep the previously configured API URL, if any.
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Anthropic {
                                default_model: AnthropicModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        "ollama" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Ollama {
                                default_model: Some(ollama::Model::new(
                                    &model,
                                    None,
                                    None,
                                    language_model.supports_tools(),
                                )),
                                api_url,
                            });
                        }
                        "lmstudio" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::LmStudio {
                                default_model: Some(lmstudio::Model::new(&model, None, None)),
                                api_url,
                            });
                        }
                        "openai" => {
                            // OpenAI additionally carries a user-supplied
                            // model list that must survive the update.
                            let (api_url, available_models) = match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    available_models,
                                    ..
                                }) => (api_url.clone(), available_models.clone()),
                                _ => (None, None),
                            };
                            settings.provider = Some(AssistantProviderContentV1::OpenAi {
                                default_model: OpenAiModel::from_id(&model).ok(),
                                api_url,
                                available_models,
                            });
                        }
                        "deepseek" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                                default_model: DeepseekModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        // Unknown providers cannot be represented in V1.
                        _ => {}
                    }
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.default_model = Some(LanguageModelSelection {
                        provider: provider.into(),
                        model,
                    });
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                // Legacy settings only support OpenAI models; anything else
                // is silently ignored.
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        default_model: Some(LanguageModelSelection {
                            provider: provider.into(),
                            model,
                        }),
                        ..Default::default()
                    },
                ));
            }
        }
    }
445
446 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
447 self.v2_setting(|setting| {
448 setting.inline_assistant_model = Some(LanguageModelSelection {
449 provider: provider.into(),
450 model,
451 });
452 Ok(())
453 })
454 .ok();
455 }
456
457 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
458 self.v2_setting(|setting| {
459 setting.commit_message_model = Some(LanguageModelSelection {
460 provider: provider.into(),
461 model,
462 });
463 Ok(())
464 })
465 .ok();
466 }
467
468 pub fn v2_setting(
469 &mut self,
470 f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
471 ) -> anyhow::Result<()> {
472 match self.inner.get_or_insert_with(|| {
473 AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
474 ..Default::default()
475 })
476 }) {
477 AssistantSettingsContentInner::Versioned(boxed) => {
478 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
479 f(settings)
480 } else {
481 Ok(())
482 }
483 }
484 _ => Ok(()),
485 }
486 }
487
488 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
489 self.v2_setting(|setting| {
490 setting.thread_summary_model = Some(LanguageModelSelection {
491 provider: provider.into(),
492 model,
493 });
494 Ok(())
495 })
496 .ok();
497 }
498
499 pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
500 self.v2_setting(|setting| {
501 setting.always_allow_tool_actions = Some(allow);
502 Ok(())
503 })
504 .ok();
505 }
506
507 pub fn set_single_file_review(&mut self, allow: bool) {
508 self.v2_setting(|setting| {
509 setting.single_file_review = Some(allow);
510 Ok(())
511 })
512 .ok();
513 }
514
515 pub fn set_profile(&mut self, profile_id: AgentProfileId) {
516 self.v2_setting(|setting| {
517 setting.default_profile = Some(profile_id);
518 Ok(())
519 })
520 .ok();
521 }
522
523 pub fn create_profile(
524 &mut self,
525 profile_id: AgentProfileId,
526 profile: AgentProfile,
527 ) -> Result<()> {
528 self.v2_setting(|settings| {
529 let profiles = settings.profiles.get_or_insert_default();
530 if profiles.contains_key(&profile_id) {
531 bail!("profile with ID '{profile_id}' already exists");
532 }
533
534 profiles.insert(
535 profile_id,
536 AgentProfileContent {
537 name: profile.name.into(),
538 tools: profile.tools,
539 enable_all_context_servers: Some(profile.enable_all_context_servers),
540 context_servers: profile
541 .context_servers
542 .into_iter()
543 .map(|(server_id, preset)| {
544 (
545 server_id,
546 ContextServerPresetContent {
547 tools: preset.tools,
548 },
549 )
550 })
551 .collect(),
552 },
553 );
554
555 Ok(())
556 })
557 }
558}
559
/// Assistant settings tagged by schema version: `"version": "1"` or `"2"`.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
568
569impl Default for VersionedAssistantSettingsContent {
570 fn default() -> Self {
571 Self::V2(AssistantSettingsContentV2 {
572 enabled: None,
573 button: None,
574 dock: None,
575 default_width: None,
576 default_height: None,
577 default_model: None,
578 inline_assistant_model: None,
579 commit_message_model: None,
580 thread_summary_model: None,
581 inline_alternatives: None,
582 enable_experimental_live_diffs: None,
583 default_profile: None,
584 profiles: None,
585 always_allow_tool_actions: None,
586 notify_when_agent_waiting: None,
587 stream_edits: None,
588 single_file_review: None,
589 model_parameters: Vec::new(),
590 preferred_completion_mode: None,
591 })
592 }
593}
594
/// Version 2 of the assistant settings schema — the current on-disk format.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
    /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
    ///
    /// Default: true
    single_file_review: Option<bool>,
    /// Additional parameters for language model requests. When making a request
    /// to a model, parameters will be taken from the last entry in this list
    /// that matches the model's provider and name. In each entry, both provider
    /// and model are optional, so that you can specify parameters for either
    /// one.
    ///
    /// Default: []
    #[serde(default)]
    model_parameters: Vec<LanguageModelParameters>,

    /// What completion mode to enable for new threads
    ///
    /// Default: normal
    preferred_completion_mode: Option<CompletionMode>,
}
669
/// Completion mode applied to new threads.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
    #[default]
    Normal,
    Max,
}
677
678impl From<CompletionMode> for zed_llm_client::CompletionMode {
679 fn from(value: CompletionMode) -> Self {
680 match value {
681 CompletionMode::Normal => zed_llm_client::CompletionMode::Normal,
682 CompletionMode::Max => zed_llm_client::CompletionMode::Max,
683 }
684 }
685}
686
/// A (provider, model-id) pair identifying a language model.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    pub provider: LanguageModelProviderSetting,
    pub model: String,
}
692
/// Newtype over a provider id string (e.g. "openai", "anthropic").
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelProviderSetting(pub String);
695
696impl JsonSchema for LanguageModelProviderSetting {
697 fn schema_name() -> String {
698 "LanguageModelProviderSetting".into()
699 }
700
701 fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
702 schemars::schema::SchemaObject {
703 enum_values: Some(vec![
704 "anthropic".into(),
705 "bedrock".into(),
706 "google".into(),
707 "lmstudio".into(),
708 "ollama".into(),
709 "openai".into(),
710 "zed.dev".into(),
711 "copilot_chat".into(),
712 "deepseek".into(),
713 ]),
714 ..Default::default()
715 }
716 .into()
717 }
718}
719
// Wraps an owned provider id without copying.
impl From<String> for LanguageModelProviderSetting {
    fn from(provider: String) -> Self {
        Self(provider)
    }
}
725
726impl From<&str> for LanguageModelProviderSetting {
727 fn from(provider: &str) -> Self {
728 Self(provider.to_string())
729 }
730}
731
732impl Default for LanguageModelSelection {
733 fn default() -> Self {
734 Self {
735 provider: LanguageModelProviderSetting("openai".to_string()),
736 model: "gpt-4".to_string(),
737 }
738 }
739}
740
/// Serialized form of an agent profile in the settings file.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    /// Per-tool enabled flags, keyed by tool name.
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    /// Per-context-server tool presets, keyed by server id.
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
751
/// Serialized tool preset for a single context server.
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}
756
/// Version 1 of the assistant settings schema, superseded by
/// `AssistantSettingsContentV2`.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
785
/// Pre-versioning assistant settings; only OpenAI was configurable.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}
813
// Wires `AssistantSettings` into the settings store, including support for
// the legacy "assistant" key and pre-V2 schemas.
impl Settings for AssistantSettings {
    // Settings live under the "agent" key...
    const KEY: Option<&'static str> = Some("agent");

    // ...falling back to the legacy "assistant" key when "agent" is absent.
    const FALLBACK_KEY: Option<&'static str> = Some("assistant");

    // Keep the "version" field on disk so the schema version stays explicit.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    // Merges defaults and user customizations (lowest to highest precedence)
    // into a resolved `AssistantSettings`.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Remember if ANY source still uses a pre-V2 schema, so the UI
            // can prompt the user to migrate.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            // Normalize every source to the V2 schema before merging.
            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            // Optional model overrides: a later `Some` replaces an earlier one,
            // while `None` leaves the previous source's value in place.
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.single_file_review, value.single_file_review);
            merge(&mut settings.default_profile, value.default_profile);
            merge(
                &mut settings.preferred_completion_mode,
                value.preferred_completion_mode,
            );

            // Model parameters accumulate across sources rather than replace,
            // so later (matching) entries win at lookup time.
            settings
                .model_parameters
                .extend_from_slice(&value.model_parameters);

            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    // Maps VS Code's "chat.agent.enabled" flag onto our enabled/button fields.
    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                // Legacy settings have no `enabled` field; only the button.
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}
947
/// Overwrites `target` with `value` when a value was provided; otherwise
/// leaves `target` untouched.
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(incoming) = value {
        *target = incoming;
    }
}
953
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};
    use settings::SettingsStore;

    use super::*;

    // Verifies that default settings resolve to the current (V2) schema and
    // that writing V2 content back to disk round-trips with `"version": "2"`.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // Defaults: current schema, zed.dev Claude model.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                enable_experimental_live_diffs: None,
                                default_profile: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                                single_file_review: None,
                                model_parameters: Vec::new(),
                                preferred_completion_mode: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        // The serialized file must carry the preserved "version" key.
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            agent: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.agent.is_version_outdated());
    }

    // Verifies that settings stored under the legacy "assistant" key are
    // loaded via FALLBACK_KEY, and that subsequent programmatic updates stay
    // under that key rather than being duplicated under "agent".
    #[gpui::test]
    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let mut test_settings = settings::SettingsStore::test(cx);
            let user_settings_content = r#"{
                "assistant": {
                    "enabled": true,
                    "version": "2",
                    "default_model": {
                        "provider": "zed.dev",
                        "model": "gpt-99"
                    },
                }}"#;
            test_settings
                .set_user_settings(user_settings_content, cx)
                .unwrap();
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.run_until_parked();

        let assistant_settings = cx.update(|cx| AssistantSettings::get_global(cx).clone());
        assert!(assistant_settings.enabled);
        assert!(!assistant_settings.using_outdated_settings_version);
        assert_eq!(assistant_settings.default_model.model, "gpt-99");

        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
            settings_store.update_user_settings::<AssistantSettings>(cx, |settings| {
                *settings = AssistantSettingsContent {
                    inner: Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(false),
                            default_model: Some(LanguageModelSelection {
                                provider: "xai".to_owned().into(),
                                model: "grok".to_owned(),
                            }),
                            ..Default::default()
                        },
                    )),
                };
            });
        });

        cx.run_until_parked();

        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
            agent: Option<serde_json_lenient::Value>,
        }

        // The update must not have created a parallel "agent" section.
        let assistant_settings: AssistantSettingsTest = serde_json::from_value(settings).unwrap();
        assert!(assistant_settings.agent.is_none());
    }
}