mod agent_profile;

use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use anyhow::{Result, bail};
use collections::IndexMap;
use deepseek::Model as DeepseekModel;
use gpui::{App, Pixels, SharedString};
use language_model::{CloudModel, LanguageModel};
use lmstudio::Model as LmStudioModel;
use ollama::Model as OllamaModel;
use schemars::{JsonSchema, schema::Schema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

pub use crate::agent_profile::*;

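/// Registers [`AssistantSettings`] with the global settings store.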
pub fn init(cx: &mut App) {
    AssistantSettings::register(cx);
}

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    #[default]
    PrimaryScreen,
    AllScreens,
    Never,
}

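/// Provider configuration from the legacy `"version": "1"` assistant settings format.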
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}

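/// The resolved agent settings, produced by merging all settings sources.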
#[derive(Default, Clone, Debug)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub inline_assistant_model: Option<LanguageModelSelection>,
    pub commit_message_model: Option<LanguageModelSelection>,
    pub thread_summary_model: Option<LanguageModelSelection>,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
    pub default_profile: AgentProfileId,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    pub stream_edits: bool,
    pub single_file_review: bool,
    pub model_parameters: Vec<LanguageModelParameters>,
    pub preferred_completion_mode: CompletionMode,
}

impl AssistantSettings {
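    /// Returns the temperature override for `model`, taken from the last entry
    /// in `model_parameters` that matches the model's provider and name.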
    pub fn temperature_for_model(model: &Arc<dyn LanguageModel>, cx: &App) -> Option<f32> {
        let settings = Self::get_global(cx);
        settings
            .model_parameters
            .iter()
            .rfind(|setting| setting.matches(model))
            .and_then(|m| m.temperature)
    }

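    /// Experimental live diffs are currently disabled unconditionally.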
    pub fn are_live_diffs_enabled(&self, _cx: &App) -> bool {
        false
    }

    pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
        self.inline_assistant_model = Some(LanguageModelSelection {
            provider: provider.into(),
            model,
        });
    }

    pub fn set_commit_message_model(&mut self, provider: String, model: String) {
        self.commit_message_model = Some(LanguageModelSelection {
            provider: provider.into(),
            model,
        });
    }

    pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
        self.thread_summary_model = Some(LanguageModelSelection {
            provider: provider.into(),
            model,
        });
    }
}

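/// Per-model request parameter overrides, matched by provider and/or model name.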
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelParameters {
    pub provider: Option<LanguageModelProviderSetting>,
    pub model: Option<SharedString>,
    pub temperature: Option<f32>,
}

impl LanguageModelParameters {
    pub fn matches(&self, model: &Arc<dyn LanguageModel>) -> bool {
        if let Some(provider) = &self.provider {
            if provider.0 != model.provider_id().0 {
                return false;
            }
        }
        if let Some(setting_model) = &self.model {
            if *setting_model != model.id().0 {
                return false;
            }
        }
        true
    }
}

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}

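/// On-disk settings payload: either a versioned form (`"version": "1"`/`"2"`) or the
/// pre-versioning legacy form.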
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    Versioned(Box<VersionedAssistantSettingsContent>),
    Legacy(LegacyAssistantSettingsContent),
}

impl AssistantSettingsContentInner {
    fn for_v2(content: AssistantSettingsContentV2) -> Self {
        AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
            content,
        )))
    }
}

impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl AssistantSettingsContent {
    pub fn is_version_outdated(&self) -> bool {
        match &self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            Some(AssistantSettingsContentInner::Legacy(_)) => true,
            None => false,
        }
    }

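    /// Converts whichever stored format is present (legacy, V1, or V2) into the
    /// current V2 representation.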
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match &self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::LmStudio { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "lmstudio".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::DeepSeek { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "deepseek".into(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                    inline_assistant_model: None,
                    commit_message_model: None,
                    thread_summary_model: None,
                    inline_alternatives: None,
                    enable_experimental_live_diffs: None,
                    default_profile: None,
                    profiles: None,
                    always_allow_tool_actions: None,
                    notify_when_agent_waiting: None,
                    stream_edits: None,
                    single_file_review: None,
                    model_parameters: Vec::new(),
                    preferred_completion_mode: None,
                },
                VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".into(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
                inline_assistant_model: None,
                commit_message_model: None,
                thread_summary_model: None,
                inline_alternatives: None,
                enable_experimental_live_diffs: None,
                default_profile: None,
                profiles: None,
                always_allow_tool_actions: None,
                notify_when_agent_waiting: None,
                stream_edits: None,
                single_file_review: None,
                model_parameters: Vec::new(),
                preferred_completion_mode: None,
            },
            None => AssistantSettingsContentV2::default(),
        }
    }

    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.dock = Some(dock);
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                settings.dock = Some(dock);
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        dock: Some(dock),
                        ..Default::default()
                    },
                ))
            }
        }
    }

    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    match provider.as_ref() {
                        "zed.dev" => {
                            log::warn!("attempted to set zed.dev model on outdated settings");
                        }
                        "anthropic" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Anthropic {
                                default_model: AnthropicModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        "ollama" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Ollama {
                                default_model: Some(ollama::Model::new(
                                    &model,
                                    None,
                                    None,
                                    Some(language_model.supports_tools()),
                                )),
                                api_url,
                            });
                        }
                        "lmstudio" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::LmStudio {
                                default_model: Some(lmstudio::Model::new(&model, None, None)),
                                api_url,
                            });
                        }
                        "openai" => {
                            let (api_url, available_models) = match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    available_models,
                                    ..
                                }) => (api_url.clone(), available_models.clone()),
                                _ => (None, None),
                            };
                            settings.provider = Some(AssistantProviderContentV1::OpenAi {
                                default_model: OpenAiModel::from_id(&model).ok(),
                                api_url,
                                available_models,
                            });
                        }
                        "deepseek" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                                default_model: DeepseekModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        _ => {}
                    }
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.default_model = Some(LanguageModelSelection {
                        provider: provider.into(),
                        model,
                    });
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        default_model: Some(LanguageModelSelection {
                            provider: provider.into(),
                            model,
                        }),
                        ..Default::default()
                    },
                ));
            }
        }
    }

    pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.inline_assistant_model = Some(LanguageModelSelection {
                provider: provider.into(),
                model,
            });
            Ok(())
        })
        .ok();
    }

    pub fn set_commit_message_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.commit_message_model = Some(LanguageModelSelection {
                provider: provider.into(),
                model,
            });
            Ok(())
        })
        .ok();
    }

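    /// Runs `f` against the V2 settings, creating a default V2 payload first if no
    /// settings are present. Older formats are left untouched and `f` is skipped.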
    pub fn v2_setting(
        &mut self,
        f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
    ) -> anyhow::Result<()> {
        match self.inner.get_or_insert_with(|| {
            AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
                ..Default::default()
            })
        }) {
            AssistantSettingsContentInner::Versioned(boxed) => {
                if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
                    f(settings)
                } else {
                    Ok(())
                }
            }
            _ => Ok(()),
        }
    }

    pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.thread_summary_model = Some(LanguageModelSelection {
                provider: provider.into(),
                model,
            });
            Ok(())
        })
        .ok();
    }

    pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
        self.v2_setting(|setting| {
            setting.always_allow_tool_actions = Some(allow);
            Ok(())
        })
        .ok();
    }

    pub fn set_single_file_review(&mut self, allow: bool) {
        self.v2_setting(|setting| {
            setting.single_file_review = Some(allow);
            Ok(())
        })
        .ok();
    }

    pub fn set_profile(&mut self, profile_id: AgentProfileId) {
        self.v2_setting(|setting| {
            setting.default_profile = Some(profile_id);
            Ok(())
        })
        .ok();
    }

    pub fn create_profile(
        &mut self,
        profile_id: AgentProfileId,
        profile: AgentProfile,
    ) -> Result<()> {
        self.v2_setting(|settings| {
            let profiles = settings.profiles.get_or_insert_default();
            if profiles.contains_key(&profile_id) {
                bail!("profile with ID '{profile_id}' already exists");
            }

            profiles.insert(
                profile_id,
                AgentProfileContent {
                    name: profile.name.into(),
                    tools: profile.tools,
                    enable_all_context_servers: Some(profile.enable_all_context_servers),
                    context_servers: profile
                        .context_servers
                        .into_iter()
                        .map(|(server_id, preset)| {
                            (
                                server_id,
                                ContextServerPresetContent {
                                    tools: preset.tools,
                                },
                            )
                        })
                        .collect(),
                },
            );

            Ok(())
        })
    }
}

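/// The versioned assistant settings formats, discriminated by the `"version"` key.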
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            inline_assistant_model: None,
            commit_message_model: None,
            thread_summary_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
            default_profile: None,
            profiles: None,
            always_allow_tool_actions: None,
            notify_when_agent_waiting: None,
            stream_edits: None,
            single_file_review: None,
            model_parameters: Vec::new(),
            preferred_completion_mode: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// When a tool action would normally wait for your confirmation, always allow
    /// it without asking.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
    /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
    ///
    /// Default: true
    single_file_review: Option<bool>,
    /// Additional parameters for language model requests. When making a request
    /// to a model, parameters will be taken from the last entry in this list
    /// that matches the model's provider and name. In each entry, both provider
    /// and model are optional, so that you can specify parameters for either
    /// one.
    ///
    /// Default: []
    #[serde(default)]
    model_parameters: Vec<LanguageModelParameters>,

    /// What completion mode to enable for new threads
    ///
    /// Default: normal
    preferred_completion_mode: Option<CompletionMode>,
}

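/// The completion mode requested for new threads.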
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
    #[default]
    Normal,
    Max,
}

impl From<CompletionMode> for zed_llm_client::CompletionMode {
    fn from(value: CompletionMode) -> Self {
        match value {
            CompletionMode::Normal => zed_llm_client::CompletionMode::Normal,
            CompletionMode::Max => zed_llm_client::CompletionMode::Max,
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    pub provider: LanguageModelProviderSetting,
    pub model: String,
}

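/// A language model provider identifier (e.g. "anthropic", "openai", "zed.dev").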
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelProviderSetting(pub String);

impl JsonSchema for LanguageModelProviderSetting {
    fn schema_name() -> String {
        "LanguageModelProviderSetting".into()
    }

    fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        schemars::schema::SchemaObject {
            enum_values: Some(vec![
                "anthropic".into(),
                "bedrock".into(),
                "google".into(),
                "lmstudio".into(),
                "ollama".into(),
                "openai".into(),
                "zed.dev".into(),
                "copilot_chat".into(),
                "deepseek".into(),
            ]),
            ..Default::default()
        }
        .into()
    }
}

impl From<String> for LanguageModelProviderSetting {
    fn from(provider: String) -> Self {
        Self(provider)
    }
}

impl From<&str> for LanguageModelProviderSetting {
    fn from(provider: &str) -> Self {
        Self(provider.to_string())
    }
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: LanguageModelProviderSetting("openai".to_string()),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}

#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}

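/// The `"version": "1"` assistant settings format, kept so older configs can be upgraded.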
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", or "zed.dev",
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}

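// Settings are read from the "agent" key, falling back to the legacy "assistant" key.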
impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("agent");

    const FALLBACK_KEY: Option<&'static str> = Some("assistant");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.single_file_review, value.single_file_review);
            merge(&mut settings.default_profile, value.default_profile);
            merge(
                &mut settings.preferred_completion_mode,
                value.preferred_completion_mode,
            );

            settings
                .model_parameters
                .extend_from_slice(&value.model_parameters);

            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}

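/// Overwrites `target` with `value` when a value is provided.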
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};
    use settings::SettingsStore;

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                enable_experimental_live_diffs: None,
                                default_profile: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                                single_file_review: None,
                                model_parameters: Vec::new(),
                                preferred_completion_mode: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            agent: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.agent.is_version_outdated());
    }

    #[gpui::test]
    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let mut test_settings = settings::SettingsStore::test(cx);
            let user_settings_content = r#"{
                "assistant": {
                    "enabled": true,
                    "version": "2",
                    "default_model": {
                        "provider": "zed.dev",
                        "model": "gpt-99"
                    },
                }}"#;
            test_settings
                .set_user_settings(user_settings_content, cx)
                .unwrap();
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.run_until_parked();

        let assistant_settings = cx.update(|cx| AssistantSettings::get_global(cx).clone());
        assert!(assistant_settings.enabled);
        assert!(!assistant_settings.using_outdated_settings_version);
        assert_eq!(assistant_settings.default_model.model, "gpt-99");

        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
            settings_store.update_user_settings::<AssistantSettings>(cx, |settings| {
                *settings = AssistantSettingsContent {
                    inner: Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(false),
                            default_model: Some(LanguageModelSelection {
                                provider: "xai".to_owned().into(),
                                model: "grok".to_owned(),
                            }),
                            ..Default::default()
                        },
                    )),
                };
            });
        });

        cx.run_until_parked();

        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
            agent: Option<serde_json_lenient::Value>,
        }

        let assistant_settings: AssistantSettingsTest = serde_json::from_value(settings).unwrap();
        assert!(assistant_settings.agent.is_none());
    }
}