1mod agent_profile;
2
3use std::sync::Arc;
4
5use ::open_ai::Model as OpenAiModel;
6use anthropic::Model as AnthropicModel;
7use anyhow::{Result, bail};
8use collections::IndexMap;
9use deepseek::Model as DeepseekModel;
10use gpui::{App, Pixels, SharedString};
11use language_model::{CloudModel, LanguageModel};
12use lmstudio::Model as LmStudioModel;
13use ollama::Model as OllamaModel;
14use schemars::{JsonSchema, schema::Schema};
15use serde::{Deserialize, Serialize};
16use settings::{Settings, SettingsSources};
17
18pub use crate::agent_profile::*;
19
/// Registers [`AssistantSettings`] with the global settings store.
pub fn init(cx: &mut App) {
    AssistantSettings::register(cx);
}
23
/// Which edge of the workspace the assistant panel is docked to.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}
32
/// Where to surface a popup notification when the agent is waiting for
/// user input.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    #[default]
    PrimaryScreen,
    AllScreens,
    Never,
}
41
/// Per-provider configuration as persisted by the V1 settings schema,
/// discriminated by the `"name"` field. Superseded by
/// [`LanguageModelSelection`] in V2; kept only so old settings files can be
/// deserialized and upgraded.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}
74
/// The resolved, in-memory assistant settings produced by merging defaults
/// and user customizations in [`Settings::load`].
#[derive(Default, Clone, Debug)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    // The three optional models below fall back to `default_model` when unset.
    pub inline_assistant_model: Option<LanguageModelSelection>,
    pub commit_message_model: Option<LanguageModelSelection>,
    pub thread_summary_model: Option<LanguageModelSelection>,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    // True when any settings source used a pre-V2 schema.
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
    pub default_profile: AgentProfileId,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    pub stream_edits: bool,
    pub single_file_review: bool,
    pub model_parameters: Vec<LanguageModelParameters>,
    pub preferred_completion_mode: CompletionMode,
}
98
99impl AssistantSettings {
100 pub fn temperature_for_model(model: &Arc<dyn LanguageModel>, cx: &App) -> Option<f32> {
101 let settings = Self::get_global(cx);
102 settings
103 .model_parameters
104 .iter()
105 .rfind(|setting| setting.matches(model))
106 .and_then(|m| m.temperature)
107 }
108
109 pub fn stream_edits(&self, _cx: &App) -> bool {
110 // TODO: Remove the `stream_edits` setting.
111 true
112 }
113
114 pub fn are_live_diffs_enabled(&self, _cx: &App) -> bool {
115 false
116 }
117
118 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
119 self.inline_assistant_model = Some(LanguageModelSelection {
120 provider: provider.into(),
121 model,
122 });
123 }
124
125 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
126 self.commit_message_model = Some(LanguageModelSelection {
127 provider: provider.into(),
128 model,
129 });
130 }
131
132 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
133 self.thread_summary_model = Some(LanguageModelSelection {
134 provider: provider.into(),
135 model,
136 });
137 }
138}
139
/// A user-specified request-parameter override (currently just temperature),
/// scoped to an optional provider and/or model; an unset field matches any
/// provider/model.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelParameters {
    pub provider: Option<LanguageModelProviderSetting>,
    pub model: Option<SharedString>,
    pub temperature: Option<f32>,
}
146
147impl LanguageModelParameters {
148 pub fn matches(&self, model: &Arc<dyn LanguageModel>) -> bool {
149 if let Some(provider) = &self.provider {
150 if provider.0 != model.provider_id().0 {
151 return false;
152 }
153 }
154 if let Some(setting_model) = &self.model {
155 if *setting_model != model.id().0 {
156 return false;
157 }
158 }
159 true
160 }
161}
162
/// Assistant panel settings
// `inner` is flattened so the versioned and legacy shapes both deserialize
// from the same settings object; `None` means the user has no assistant
// settings at all.
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}
169
/// Either a versioned settings payload (with an explicit `"version"` key) or
/// the original unversioned legacy shape; `untagged` tries each variant in
/// declaration order during deserialization.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    Versioned(Box<VersionedAssistantSettingsContent>),
    Legacy(LegacyAssistantSettingsContent),
}
176
177impl AssistantSettingsContentInner {
178 fn for_v2(content: AssistantSettingsContentV2) -> Self {
179 AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
180 content,
181 )))
182 }
183}
184
// Delegate the JSON schema for the whole settings object to the versioned
// representation, so editors validate/complete against the current shape
// rather than the permissive flattened `Option`.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
198
199impl AssistantSettingsContent {
200 pub fn is_version_outdated(&self) -> bool {
201 match &self.inner {
202 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
203 VersionedAssistantSettingsContent::V1(_) => true,
204 VersionedAssistantSettingsContent::V2(_) => false,
205 },
206 Some(AssistantSettingsContentInner::Legacy(_)) => true,
207 None => false,
208 }
209 }
210
211 fn upgrade(&self) -> AssistantSettingsContentV2 {
212 match &self.inner {
213 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
214 VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
215 enabled: settings.enabled,
216 button: settings.button,
217 dock: settings.dock,
218 default_width: settings.default_width,
219 default_height: settings.default_width,
220 default_model: settings
221 .provider
222 .clone()
223 .and_then(|provider| match provider {
224 AssistantProviderContentV1::ZedDotDev { default_model } => {
225 default_model.map(|model| LanguageModelSelection {
226 provider: "zed.dev".into(),
227 model: model.id().to_string(),
228 })
229 }
230 AssistantProviderContentV1::OpenAi { default_model, .. } => {
231 default_model.map(|model| LanguageModelSelection {
232 provider: "openai".into(),
233 model: model.id().to_string(),
234 })
235 }
236 AssistantProviderContentV1::Anthropic { default_model, .. } => {
237 default_model.map(|model| LanguageModelSelection {
238 provider: "anthropic".into(),
239 model: model.id().to_string(),
240 })
241 }
242 AssistantProviderContentV1::Ollama { default_model, .. } => {
243 default_model.map(|model| LanguageModelSelection {
244 provider: "ollama".into(),
245 model: model.id().to_string(),
246 })
247 }
248 AssistantProviderContentV1::LmStudio { default_model, .. } => {
249 default_model.map(|model| LanguageModelSelection {
250 provider: "lmstudio".into(),
251 model: model.id().to_string(),
252 })
253 }
254 AssistantProviderContentV1::DeepSeek { default_model, .. } => {
255 default_model.map(|model| LanguageModelSelection {
256 provider: "deepseek".into(),
257 model: model.id().to_string(),
258 })
259 }
260 }),
261 inline_assistant_model: None,
262 commit_message_model: None,
263 thread_summary_model: None,
264 inline_alternatives: None,
265 enable_experimental_live_diffs: None,
266 default_profile: None,
267 profiles: None,
268 always_allow_tool_actions: None,
269 notify_when_agent_waiting: None,
270 stream_edits: None,
271 single_file_review: None,
272 model_parameters: Vec::new(),
273 preferred_completion_mode: None,
274 },
275 VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
276 },
277 Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
278 enabled: None,
279 button: settings.button,
280 dock: settings.dock,
281 default_width: settings.default_width,
282 default_height: settings.default_height,
283 default_model: Some(LanguageModelSelection {
284 provider: "openai".into(),
285 model: settings
286 .default_open_ai_model
287 .clone()
288 .unwrap_or_default()
289 .id()
290 .to_string(),
291 }),
292 inline_assistant_model: None,
293 commit_message_model: None,
294 thread_summary_model: None,
295 inline_alternatives: None,
296 enable_experimental_live_diffs: None,
297 default_profile: None,
298 profiles: None,
299 always_allow_tool_actions: None,
300 notify_when_agent_waiting: None,
301 stream_edits: None,
302 single_file_review: None,
303 model_parameters: Vec::new(),
304 preferred_completion_mode: None,
305 },
306 None => AssistantSettingsContentV2::default(),
307 }
308 }
309
310 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
311 match &mut self.inner {
312 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
313 VersionedAssistantSettingsContent::V1(ref mut settings) => {
314 settings.dock = Some(dock);
315 }
316 VersionedAssistantSettingsContent::V2(ref mut settings) => {
317 settings.dock = Some(dock);
318 }
319 },
320 Some(AssistantSettingsContentInner::Legacy(settings)) => {
321 settings.dock = Some(dock);
322 }
323 None => {
324 self.inner = Some(AssistantSettingsContentInner::for_v2(
325 AssistantSettingsContentV2 {
326 dock: Some(dock),
327 ..Default::default()
328 },
329 ))
330 }
331 }
332 }
333
334 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
335 let model = language_model.id().0.to_string();
336 let provider = language_model.provider_id().0.to_string();
337
338 match &mut self.inner {
339 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
340 VersionedAssistantSettingsContent::V1(ref mut settings) => {
341 match provider.as_ref() {
342 "zed.dev" => {
343 log::warn!("attempted to set zed.dev model on outdated settings");
344 }
345 "anthropic" => {
346 let api_url = match &settings.provider {
347 Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
348 api_url.clone()
349 }
350 _ => None,
351 };
352 settings.provider = Some(AssistantProviderContentV1::Anthropic {
353 default_model: AnthropicModel::from_id(&model).ok(),
354 api_url,
355 });
356 }
357 "ollama" => {
358 let api_url = match &settings.provider {
359 Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
360 api_url.clone()
361 }
362 _ => None,
363 };
364 settings.provider = Some(AssistantProviderContentV1::Ollama {
365 default_model: Some(ollama::Model::new(
366 &model,
367 None,
368 None,
369 language_model.supports_tools(),
370 )),
371 api_url,
372 });
373 }
374 "lmstudio" => {
375 let api_url = match &settings.provider {
376 Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
377 api_url.clone()
378 }
379 _ => None,
380 };
381 settings.provider = Some(AssistantProviderContentV1::LmStudio {
382 default_model: Some(lmstudio::Model::new(&model, None, None)),
383 api_url,
384 });
385 }
386 "openai" => {
387 let (api_url, available_models) = match &settings.provider {
388 Some(AssistantProviderContentV1::OpenAi {
389 api_url,
390 available_models,
391 ..
392 }) => (api_url.clone(), available_models.clone()),
393 _ => (None, None),
394 };
395 settings.provider = Some(AssistantProviderContentV1::OpenAi {
396 default_model: OpenAiModel::from_id(&model).ok(),
397 api_url,
398 available_models,
399 });
400 }
401 "deepseek" => {
402 let api_url = match &settings.provider {
403 Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
404 api_url.clone()
405 }
406 _ => None,
407 };
408 settings.provider = Some(AssistantProviderContentV1::DeepSeek {
409 default_model: DeepseekModel::from_id(&model).ok(),
410 api_url,
411 });
412 }
413 _ => {}
414 }
415 }
416 VersionedAssistantSettingsContent::V2(ref mut settings) => {
417 settings.default_model = Some(LanguageModelSelection {
418 provider: provider.into(),
419 model,
420 });
421 }
422 },
423 Some(AssistantSettingsContentInner::Legacy(settings)) => {
424 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
425 settings.default_open_ai_model = Some(model);
426 }
427 }
428 None => {
429 self.inner = Some(AssistantSettingsContentInner::for_v2(
430 AssistantSettingsContentV2 {
431 default_model: Some(LanguageModelSelection {
432 provider: provider.into(),
433 model,
434 }),
435 ..Default::default()
436 },
437 ));
438 }
439 }
440 }
441
442 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
443 self.v2_setting(|setting| {
444 setting.inline_assistant_model = Some(LanguageModelSelection {
445 provider: provider.into(),
446 model,
447 });
448 Ok(())
449 })
450 .ok();
451 }
452
453 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
454 self.v2_setting(|setting| {
455 setting.commit_message_model = Some(LanguageModelSelection {
456 provider: provider.into(),
457 model,
458 });
459 Ok(())
460 })
461 .ok();
462 }
463
464 pub fn v2_setting(
465 &mut self,
466 f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
467 ) -> anyhow::Result<()> {
468 match self.inner.get_or_insert_with(|| {
469 AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
470 ..Default::default()
471 })
472 }) {
473 AssistantSettingsContentInner::Versioned(boxed) => {
474 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
475 f(settings)
476 } else {
477 Ok(())
478 }
479 }
480 _ => Ok(()),
481 }
482 }
483
484 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
485 self.v2_setting(|setting| {
486 setting.thread_summary_model = Some(LanguageModelSelection {
487 provider: provider.into(),
488 model,
489 });
490 Ok(())
491 })
492 .ok();
493 }
494
495 pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
496 self.v2_setting(|setting| {
497 setting.always_allow_tool_actions = Some(allow);
498 Ok(())
499 })
500 .ok();
501 }
502
503 pub fn set_single_file_review(&mut self, allow: bool) {
504 self.v2_setting(|setting| {
505 setting.single_file_review = Some(allow);
506 Ok(())
507 })
508 .ok();
509 }
510
511 pub fn set_profile(&mut self, profile_id: AgentProfileId) {
512 self.v2_setting(|setting| {
513 setting.default_profile = Some(profile_id);
514 Ok(())
515 })
516 .ok();
517 }
518
519 pub fn create_profile(
520 &mut self,
521 profile_id: AgentProfileId,
522 profile: AgentProfile,
523 ) -> Result<()> {
524 self.v2_setting(|settings| {
525 let profiles = settings.profiles.get_or_insert_default();
526 if profiles.contains_key(&profile_id) {
527 bail!("profile with ID '{profile_id}' already exists");
528 }
529
530 profiles.insert(
531 profile_id,
532 AgentProfileContent {
533 name: profile.name.into(),
534 tools: profile.tools,
535 enable_all_context_servers: Some(profile.enable_all_context_servers),
536 context_servers: profile
537 .context_servers
538 .into_iter()
539 .map(|(server_id, preset)| {
540 (
541 server_id,
542 ContextServerPresetContent {
543 tools: preset.tools,
544 },
545 )
546 })
547 .collect(),
548 },
549 );
550
551 Ok(())
552 })
553 }
554}
555
/// The versioned settings payload, discriminated by the `"version"` string
/// in the settings file.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
564
565impl Default for VersionedAssistantSettingsContent {
566 fn default() -> Self {
567 Self::V2(AssistantSettingsContentV2 {
568 enabled: None,
569 button: None,
570 dock: None,
571 default_width: None,
572 default_height: None,
573 default_model: None,
574 inline_assistant_model: None,
575 commit_message_model: None,
576 thread_summary_model: None,
577 inline_alternatives: None,
578 enable_experimental_live_diffs: None,
579 default_profile: None,
580 profiles: None,
581 always_allow_tool_actions: None,
582 notify_when_agent_waiting: None,
583 stream_edits: None,
584 single_file_review: None,
585 model_parameters: Vec::new(),
586 preferred_completion_mode: None,
587 })
588 }
589}
590
/// Schema version 2 of the assistant settings: the current on-disk format.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
    /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
    ///
    /// Default: true
    single_file_review: Option<bool>,
    /// Additional parameters for language model requests. When making a request
    /// to a model, parameters will be taken from the last entry in this list
    /// that matches the model's provider and name. In each entry, both provider
    /// and model are optional, so that you can specify parameters for either
    /// one.
    ///
    /// Default: []
    #[serde(default)]
    model_parameters: Vec<LanguageModelParameters>,

    /// What completion mode to enable for new threads
    ///
    /// Default: normal
    preferred_completion_mode: Option<CompletionMode>,
}
665
/// The completion mode requested for new threads.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
    #[default]
    Normal,
    Max,
}
673
674impl From<CompletionMode> for zed_llm_client::CompletionMode {
675 fn from(value: CompletionMode) -> Self {
676 match value {
677 CompletionMode::Normal => zed_llm_client::CompletionMode::Normal,
678 CompletionMode::Max => zed_llm_client::CompletionMode::Max,
679 }
680 }
681}
682
/// A (provider, model) pair identifying a concrete language model.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    pub provider: LanguageModelProviderSetting,
    pub model: String,
}
688
/// Newtype over the provider-id string; its JSON schema (below) enumerates
/// the known providers for editor completion.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelProviderSetting(pub String);
691
692impl JsonSchema for LanguageModelProviderSetting {
693 fn schema_name() -> String {
694 "LanguageModelProviderSetting".into()
695 }
696
697 fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
698 schemars::schema::SchemaObject {
699 enum_values: Some(vec![
700 "anthropic".into(),
701 "bedrock".into(),
702 "google".into(),
703 "lmstudio".into(),
704 "ollama".into(),
705 "openai".into(),
706 "zed.dev".into(),
707 "copilot_chat".into(),
708 "deepseek".into(),
709 ]),
710 ..Default::default()
711 }
712 .into()
713 }
714}
715
// Convenience conversion so call sites can pass owned provider-id strings.
impl From<String> for LanguageModelProviderSetting {
    fn from(provider: String) -> Self {
        Self(provider)
    }
}
721
// Convenience conversion so call sites can pass provider-id literals.
impl From<&str> for LanguageModelProviderSetting {
    fn from(provider: &str) -> Self {
        Self(provider.to_string())
    }
}
727
728impl Default for LanguageModelSelection {
729 fn default() -> Self {
730 Self {
731 provider: LanguageModelProviderSetting("openai".to_string()),
732 model: "gpt-4".to_string(),
733 }
734 }
735}
736
/// On-disk representation of an agent profile: which tools and context
/// servers are enabled under a given profile name.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
747
/// On-disk per-context-server tool toggles within an agent profile.
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}
752
/// Schema version 1 of the assistant settings; superseded by
/// [`AssistantSettingsContentV2`] and kept for upgrading old files.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
781
/// The original, unversioned assistant settings shape (OpenAI-only); kept
/// for upgrading very old settings files.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}
809
impl Settings for AssistantSettings {
    // Settings live under the `"agent"` key, falling back to the older
    // `"assistant"` key for users who have not migrated.
    const KEY: Option<&'static str> = Some("agent");

    const FALLBACK_KEY: Option<&'static str> = Some("assistant");

    // Keep `"version"` when rewriting the settings file so the schema
    // version marker is never dropped.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Builds the resolved settings by folding defaults and user
    /// customizations in order; later sources override earlier ones.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            // Normalize every source to the V2 schema before merging.
            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            // For optional models, a later source replaces an earlier one;
            // `take` avoids cloning the previous value.
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.single_file_review, value.single_file_review);
            merge(&mut settings.default_profile, value.default_profile);
            merge(
                &mut settings.preferred_completion_mode,
                value.preferred_completion_mode,
            );

            // Parameters accumulate across sources; `temperature_for_model`
            // scans from the end, so later sources take precedence.
            settings
                .model_parameters
                .extend_from_slice(&value.model_parameters);

            // Profiles merge by id; a later definition of the same id fully
            // replaces an earlier one.
            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    /// Maps VS Code's `chat.agent.enabled` onto both `enabled` and `button`
    /// for whichever schema version the current content uses.
    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}
943
/// Overwrites `target` with the inner value when `value` is `Some`; leaves
/// `target` untouched for `None`.
fn merge<T>(target: &mut T, value: Option<T>) {
    let Some(overriding) = value else { return };
    *target = overriding;
}
949
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};
    use settings::SettingsStore;

    use super::*;

    // Writing V2 settings back to disk must keep the `"version": "2"`
    // marker (see `PRESERVED_KEYS`) so the file round-trips as current.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                enable_experimental_live_diffs: None,
                                default_profile: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                                single_file_review: None,
                                model_parameters: Vec::new(),
                                preferred_completion_mode: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            agent: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.agent.is_version_outdated());
    }

    // Settings stored under the old `"assistant"` key must still load (via
    // `FALLBACK_KEY`), and updates must not create a duplicate `"agent"` key.
    #[gpui::test]
    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let mut test_settings = settings::SettingsStore::test(cx);
            let user_settings_content = r#"{
                "assistant": {
                    "enabled": true,
                    "version": "2",
                    "default_model": {
                        "provider": "zed.dev",
                        "model": "gpt-99"
                    },
                }}"#;
            test_settings
                .set_user_settings(user_settings_content, cx)
                .unwrap();
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.run_until_parked();

        let assistant_settings = cx.update(|cx| AssistantSettings::get_global(cx).clone());
        assert!(assistant_settings.enabled);
        assert!(!assistant_settings.using_outdated_settings_version);
        assert_eq!(assistant_settings.default_model.model, "gpt-99");

        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
            settings_store.update_user_settings::<AssistantSettings>(cx, |settings| {
                *settings = AssistantSettingsContent {
                    inner: Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(false),
                            default_model: Some(LanguageModelSelection {
                                provider: "xai".to_owned().into(),
                                model: "grok".to_owned(),
                            }),
                            ..Default::default()
                        },
                    )),
                };
            });
        });

        cx.run_until_parked();

        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
            agent: Option<serde_json_lenient::Value>,
        }

        let assistant_settings: AssistantSettingsTest = serde_json::from_value(settings).unwrap();
        assert!(assistant_settings.agent.is_none());
    }
}