1mod agent_profile;
2
3use std::sync::Arc;
4
5use ::open_ai::Model as OpenAiModel;
6use anthropic::Model as AnthropicModel;
7use anyhow::{Result, bail};
8use deepseek::Model as DeepseekModel;
9use feature_flags::{Assistant2FeatureFlag, FeatureFlagAppExt};
10use gpui::{App, Pixels};
11use indexmap::IndexMap;
12use language_model::{CloudModel, LanguageModel};
13use lmstudio::Model as LmStudioModel;
14use ollama::Model as OllamaModel;
15use schemars::{JsonSchema, schema::Schema};
16use serde::{Deserialize, Serialize};
17use settings::{Settings, SettingsSources};
18
19pub use crate::agent_profile::*;
20
// Where the assistant panel is docked in the workspace.
// (Plain `//` comments are used deliberately: `///` doc comments would be
// picked up by schemars and change the generated JSON schema descriptions.)
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}
29
// Controls where a popup notification is shown when the agent is waiting for
// user input (see `AssistantSettings::notify_when_agent_waiting`).
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    #[default]
    PrimaryScreen,
    AllScreens,
    Never,
}
38
// Per-provider assistant configuration as it appeared in the V1 settings
// schema: each variant carries that provider's default model and, where
// applicable, an API URL override (OpenAI additionally carries an explicit
// list of available models). Serialized with the provider name under the
// `"name"` tag.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}
71
/// Fully-resolved assistant settings, produced by merging all settings
/// sources (defaults plus user customizations) in [`Settings::load`].
#[derive(Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub editor_model: LanguageModelSelection,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    // Set during `load` when any settings source still used a pre-V2 schema,
    // so the UI can prompt the user to migrate.
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
    pub default_profile: AgentProfileId,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
}
89
90impl AssistantSettings {
91 pub fn are_live_diffs_enabled(&self, cx: &App) -> bool {
92 if cx.has_flag::<Assistant2FeatureFlag>() {
93 return false;
94 }
95
96 cx.is_staff() || self.enable_experimental_live_diffs
97 }
98}
99
/// Assistant panel settings
// Untagged: serde tries the variants in declaration order, so input is
// parsed as the versioned form first and falls back to the legacy
// (unversioned) form.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}
107
// Manual `JsonSchema` impl: the `Legacy` variant exists only for reading old
// files, so the published schema is just that of the current versioned form.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
121
122impl Default for AssistantSettingsContent {
123 fn default() -> Self {
124 Self::Versioned(VersionedAssistantSettingsContent::default())
125 }
126}
127
128impl AssistantSettingsContent {
129 pub fn is_version_outdated(&self) -> bool {
130 match self {
131 AssistantSettingsContent::Versioned(settings) => match settings {
132 VersionedAssistantSettingsContent::V1(_) => true,
133 VersionedAssistantSettingsContent::V2(_) => false,
134 },
135 AssistantSettingsContent::Legacy(_) => true,
136 }
137 }
138
139 fn upgrade(&self) -> AssistantSettingsContentV2 {
140 match self {
141 AssistantSettingsContent::Versioned(settings) => match settings {
142 VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
143 enabled: settings.enabled,
144 button: settings.button,
145 dock: settings.dock,
146 default_width: settings.default_width,
147 default_height: settings.default_width,
148 default_model: settings
149 .provider
150 .clone()
151 .and_then(|provider| match provider {
152 AssistantProviderContentV1::ZedDotDev { default_model } => {
153 default_model.map(|model| LanguageModelSelection {
154 provider: "zed.dev".to_string(),
155 model: model.id().to_string(),
156 })
157 }
158 AssistantProviderContentV1::OpenAi { default_model, .. } => {
159 default_model.map(|model| LanguageModelSelection {
160 provider: "openai".to_string(),
161 model: model.id().to_string(),
162 })
163 }
164 AssistantProviderContentV1::Anthropic { default_model, .. } => {
165 default_model.map(|model| LanguageModelSelection {
166 provider: "anthropic".to_string(),
167 model: model.id().to_string(),
168 })
169 }
170 AssistantProviderContentV1::Ollama { default_model, .. } => {
171 default_model.map(|model| LanguageModelSelection {
172 provider: "ollama".to_string(),
173 model: model.id().to_string(),
174 })
175 }
176 AssistantProviderContentV1::LmStudio { default_model, .. } => {
177 default_model.map(|model| LanguageModelSelection {
178 provider: "lmstudio".to_string(),
179 model: model.id().to_string(),
180 })
181 }
182 AssistantProviderContentV1::DeepSeek { default_model, .. } => {
183 default_model.map(|model| LanguageModelSelection {
184 provider: "deepseek".to_string(),
185 model: model.id().to_string(),
186 })
187 }
188 }),
189 editor_model: None,
190 inline_alternatives: None,
191 enable_experimental_live_diffs: None,
192 default_profile: None,
193 profiles: None,
194 always_allow_tool_actions: None,
195 notify_when_agent_waiting: None,
196 },
197 VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
198 },
199 AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
200 enabled: None,
201 button: settings.button,
202 dock: settings.dock,
203 default_width: settings.default_width,
204 default_height: settings.default_height,
205 default_model: Some(LanguageModelSelection {
206 provider: "openai".to_string(),
207 model: settings
208 .default_open_ai_model
209 .clone()
210 .unwrap_or_default()
211 .id()
212 .to_string(),
213 }),
214 editor_model: None,
215 inline_alternatives: None,
216 enable_experimental_live_diffs: None,
217 default_profile: None,
218 profiles: None,
219 always_allow_tool_actions: None,
220 notify_when_agent_waiting: None,
221 },
222 }
223 }
224
225 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
226 match self {
227 AssistantSettingsContent::Versioned(settings) => match settings {
228 VersionedAssistantSettingsContent::V1(settings) => {
229 settings.dock = Some(dock);
230 }
231 VersionedAssistantSettingsContent::V2(settings) => {
232 settings.dock = Some(dock);
233 }
234 },
235 AssistantSettingsContent::Legacy(settings) => {
236 settings.dock = Some(dock);
237 }
238 }
239 }
240
241 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
242 let model = language_model.id().0.to_string();
243 let provider = language_model.provider_id().0.to_string();
244
245 match self {
246 AssistantSettingsContent::Versioned(settings) => match settings {
247 VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
248 "zed.dev" => {
249 log::warn!("attempted to set zed.dev model on outdated settings");
250 }
251 "anthropic" => {
252 let api_url = match &settings.provider {
253 Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
254 api_url.clone()
255 }
256 _ => None,
257 };
258 settings.provider = Some(AssistantProviderContentV1::Anthropic {
259 default_model: AnthropicModel::from_id(&model).ok(),
260 api_url,
261 });
262 }
263 "ollama" => {
264 let api_url = match &settings.provider {
265 Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
266 api_url.clone()
267 }
268 _ => None,
269 };
270 settings.provider = Some(AssistantProviderContentV1::Ollama {
271 default_model: Some(ollama::Model::new(&model, None, None)),
272 api_url,
273 });
274 }
275 "lmstudio" => {
276 let api_url = match &settings.provider {
277 Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
278 api_url.clone()
279 }
280 _ => None,
281 };
282 settings.provider = Some(AssistantProviderContentV1::LmStudio {
283 default_model: Some(lmstudio::Model::new(&model, None, None)),
284 api_url,
285 });
286 }
287 "openai" => {
288 let (api_url, available_models) = match &settings.provider {
289 Some(AssistantProviderContentV1::OpenAi {
290 api_url,
291 available_models,
292 ..
293 }) => (api_url.clone(), available_models.clone()),
294 _ => (None, None),
295 };
296 settings.provider = Some(AssistantProviderContentV1::OpenAi {
297 default_model: OpenAiModel::from_id(&model).ok(),
298 api_url,
299 available_models,
300 });
301 }
302 "deepseek" => {
303 let api_url = match &settings.provider {
304 Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
305 api_url.clone()
306 }
307 _ => None,
308 };
309 settings.provider = Some(AssistantProviderContentV1::DeepSeek {
310 default_model: DeepseekModel::from_id(&model).ok(),
311 api_url,
312 });
313 }
314 _ => {}
315 },
316 VersionedAssistantSettingsContent::V2(settings) => {
317 settings.default_model = Some(LanguageModelSelection { provider, model });
318 }
319 },
320 AssistantSettingsContent::Legacy(settings) => {
321 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
322 settings.default_open_ai_model = Some(model);
323 }
324 }
325 }
326 }
327
328 pub fn set_profile(&mut self, profile_id: AgentProfileId) {
329 let AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(settings)) =
330 self
331 else {
332 return;
333 };
334
335 settings.default_profile = Some(profile_id);
336 }
337
338 pub fn create_profile(
339 &mut self,
340 profile_id: AgentProfileId,
341 profile: AgentProfile,
342 ) -> Result<()> {
343 let AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(settings)) =
344 self
345 else {
346 return Ok(());
347 };
348
349 let profiles = settings.profiles.get_or_insert_default();
350 if profiles.contains_key(&profile_id) {
351 bail!("profile with ID '{profile_id}' already exists");
352 }
353
354 profiles.insert(
355 profile_id,
356 AgentProfileContent {
357 name: profile.name.into(),
358 tools: profile.tools,
359 enable_all_context_servers: Some(profile.enable_all_context_servers),
360 context_servers: profile
361 .context_servers
362 .into_iter()
363 .map(|(server_id, preset)| {
364 (
365 server_id,
366 ContextServerPresetContent {
367 tools: preset.tools,
368 },
369 )
370 })
371 .collect(),
372 },
373 );
374
375 Ok(())
376 }
377}
378
// Assistant settings content distinguished by the `"version"` key in the
// settings JSON ("1" or "2").
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
387
impl Default for VersionedAssistantSettingsContent {
    /// Defaults to the latest (V2) schema with every field unset, so that
    /// actual defaults are supplied during settings merging rather than
    /// being baked into the file content.
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            editor_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
            default_profile: None,
            profiles: None,
            always_allow_tool_actions: None,
            notify_when_agent_waiting: None,
        })
    }
}
407
// Version 2 of the assistant settings schema. All fields are optional so a
// user file may override any subset; unset fields fall back to earlier
// sources during merging.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// The model to use when applying edits from the assistant.
    editor_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
}
456
// A (provider, model) pair identifying a language model in settings,
// e.g. provider "openai", model "gpt-4".
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    // Constrained in the JSON schema to the known provider names.
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}
463
464fn providers_schema(_: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
465 schemars::schema::SchemaObject {
466 enum_values: Some(vec![
467 "anthropic".into(),
468 "bedrock".into(),
469 "google".into(),
470 "lmstudio".into(),
471 "ollama".into(),
472 "openai".into(),
473 "zed.dev".into(),
474 "copilot_chat".into(),
475 "deepseek".into(),
476 ]),
477 ..Default::default()
478 }
479 .into()
480}
481
482impl Default for LanguageModelSelection {
483 fn default() -> Self {
484 Self {
485 provider: "openai".to_string(),
486 model: "gpt-4".to_string(),
487 }
488 }
489}
490
// Serialized form of an agent profile as stored in the settings file
// (the settings-side counterpart of `AgentProfile`).
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    // Per-tool enablement for this profile.
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    // Per-context-server tool presets, keyed by server ID.
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
501
// Tool enablement preset for a single context server within a profile.
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}
506
// Version 1 of the assistant settings schema. Kept only so existing files
// can be read and upgraded via `AssistantSettingsContent::upgrade`.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
535
// Pre-versioning ("legacy") assistant settings, which only supported OpenAI.
// Kept so existing files can still be read and upgraded.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}
563
impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant");

    // Keep the "version" key when rewriting the settings file so the schema
    // version tag survives round-trips.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Resolves `AssistantSettings` by layering all sources (defaults first,
    /// then user customizations). Each source is upgraded to the V2 schema
    /// and merged field-by-field; a later source only overrides a field it
    /// explicitly sets.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Remember if any source still uses a pre-V2 schema so the UI
            // can surface a migration prompt.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            // Normalize to the latest schema before merging.
            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.editor_model, value.editor_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.default_profile, value.default_profile);

            // Profiles are accumulated across sources (extend), with later
            // sources replacing same-ID profiles wholesale.
            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }
}
644
/// Overwrites `target` with the contents of `value` when it is `Some`;
/// leaves `target` untouched when it is `None`.
fn merge<T>(target: &mut T, value: Option<T>) {
    match value {
        Some(value) => *target = value,
        None => {}
    }
}
650
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    // End-to-end check that versioned settings round-trip through the
    // settings store and the on-disk JSON keeps its `"version"` tag.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // The built-in defaults are on the current schema version.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet-latest".into(),
                }
            );
        });

        // Write V2 content through the settings store.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            editor_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                            default_profile: None,
                            profiles: None,
                            always_allow_tool_actions: None,
                            notify_when_agent_waiting: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        // The serialized file must carry the explicit version tag...
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        // ...and parsing it back must yield the current (non-outdated) form.
        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}