mod agent_profile;

use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use anyhow::{Result, bail};
use deepseek::Model as DeepseekModel;
use feature_flags::{Assistant2FeatureFlag, FeatureFlagAppExt};
use gpui::{App, Pixels};
use indexmap::IndexMap;
use language_model::{CloudModel, LanguageModel};
use lmstudio::Model as LmStudioModel;
use ollama::Model as OllamaModel;
use schemars::{JsonSchema, schema::Schema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

pub use crate::agent_profile::*;

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    #[default]
    PrimaryScreen,
    AllScreens,
    Never,
}

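// Sketch of a v1 provider entry as it might appear under the "assistant" key in
// settings.json (the model identifier shown is illustrative; its exact form depends on
// the provider's serde representation):
//
//   "provider": {
//     "name": "openai",
//     "default_model": "gpt-4",
//     "api_url": "https://api.openai.com/v1"
//   }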
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}

#[derive(Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub editor_model: LanguageModelSelection,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
    pub default_profile: Arc<str>,
    pub profiles: IndexMap<Arc<str>, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
}

impl AssistantSettings {
    pub fn are_live_diffs_enabled(&self, cx: &App) -> bool {
        if cx.has_flag::<Assistant2FeatureFlag>() {
            return false;
        }

        cx.is_staff() || self.enable_experimental_live_diffs
    }
}

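// `AssistantSettingsContent` is deserialized untagged: serde tries the variants in
// declaration order, so a versioned document wins and anything else falls back to the
// legacy, un-versioned shape.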
/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}

impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl Default for AssistantSettingsContent {
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}

impl AssistantSettingsContent {
    pub fn is_version_outdated(&self) -> bool {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            AssistantSettingsContent::Legacy(_) => true,
        }
    }

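    /// Converts whichever on-disk shape was parsed into the latest (V2) content.
    /// V1 provider-specific defaults collapse into a single `default_model`
    /// selection, and legacy settings are mapped onto an OpenAI default model.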
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::LmStudio { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "lmstudio".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::DeepSeek { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "deepseek".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                    editor_model: None,
                    inline_alternatives: None,
                    enable_experimental_live_diffs: None,
                    default_profile: None,
                    profiles: None,
                    always_allow_tool_actions: None,
                    notify_when_agent_waiting: None,
                },
                VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
            },
            AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".to_string(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
                editor_model: None,
                inline_alternatives: None,
                enable_experimental_live_diffs: None,
                default_profile: None,
                profiles: None,
                always_allow_tool_actions: None,
                notify_when_agent_waiting: None,
            },
        }
    }

    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.dock = Some(dock);
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                settings.dock = Some(dock);
            }
        }
    }

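    /// Writes the chosen model back into whichever settings version is in use.
    /// For V1 settings this rebuilds the provider entry while preserving any
    /// existing `api_url` (and, for OpenAI, `available_models`).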
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
                    "zed.dev" => {
                        log::warn!("attempted to set zed.dev model on outdated settings");
                    }
                    "anthropic" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Anthropic {
                            default_model: AnthropicModel::from_id(&model).ok(),
                            api_url,
                        });
                    }
                    "ollama" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Ollama {
                            default_model: Some(ollama::Model::new(&model, None, None)),
                            api_url,
                        });
                    }
                    "lmstudio" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::LmStudio {
                            default_model: Some(lmstudio::Model::new(&model, None, None)),
                            api_url,
                        });
                    }
                    "openai" => {
                        let (api_url, available_models) = match &settings.provider {
                            Some(AssistantProviderContentV1::OpenAi {
                                api_url,
                                available_models,
                                ..
                            }) => (api_url.clone(), available_models.clone()),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::OpenAi {
                            default_model: OpenAiModel::from_id(&model).ok(),
                            api_url,
                            available_models,
                        });
                    }
                    "deepseek" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                            default_model: DeepseekModel::from_id(&model).ok(),
                            api_url,
                        });
                    }
                    _ => {}
                },
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.default_model = Some(LanguageModelSelection { provider, model });
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
        }
    }

    pub fn set_profile(&mut self, profile_id: Arc<str>) {
        let AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(settings)) =
            self
        else {
            return;
        };

        settings.default_profile = Some(profile_id);
    }

    pub fn create_profile(&mut self, profile_id: Arc<str>, profile: AgentProfile) -> Result<()> {
        let AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(settings)) =
            self
        else {
            return Ok(());
        };

        let profiles = settings.profiles.get_or_insert_default();
        if profiles.contains_key(&profile_id) {
            bail!("profile with ID '{profile_id}' already exists");
        }

        profiles.insert(
            profile_id,
            AgentProfileContent {
                name: profile.name.into(),
                tools: profile.tools,
                context_servers: profile
                    .context_servers
                    .into_iter()
                    .map(|(server_id, preset)| {
                        (
                            server_id,
                            ContextServerPresetContent {
                                tools: preset.tools,
                            },
                        )
                    })
                    .collect(),
            },
        );

        Ok(())
    }
}

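// On disk the version is carried as a string tag alongside the content fields,
// e.g. `"version": "2"` (see the round-trip test at the bottom of this file).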
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            editor_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
            default_profile: None,
            profiles: None,
            always_allow_tool_actions: None,
            notify_when_agent_waiting: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// The model to use when applying edits from the assistant.
    editor_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    #[schemars(skip)]
    default_profile: Option<Arc<str>>,
    #[schemars(skip)]
    pub profiles: Option<IndexMap<Arc<str>, AgentProfileContent>>,
    /// When a tool action would normally wait for your confirmation, always
    /// allow it without asking.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
}

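// A model selection as it appears in settings.json, for example:
//
//   "default_model": {
//     "provider": "zed.dev",
//     "model": "claude-3-5-sonnet-latest"
//   }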
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}

fn providers_schema(_: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
    schemars::schema::SchemaObject {
        enum_values: Some(vec![
            "anthropic".into(),
            "bedrock".into(),
            "google".into(),
            "lmstudio".into(),
            "ollama".into(),
            "openai".into(),
            "zed.dev".into(),
            "copilot_chat".into(),
            "deepseek".into(),
        ]),
        ..Default::default()
    }
    .into()
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    pub tools: IndexMap<Arc<str>, bool>,
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}

#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", or "zed.dev",
    /// each with its own default model and configuration.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}

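// These settings live under the "assistant" key of settings.json (see `KEY` below).
// `load` walks the default settings followed by any user customizations, upgrading each
// source to V2 and letting later non-null values override earlier ones via `merge`.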
impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.editor_model, value.editor_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.default_profile, value.default_profile);

            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }
}

fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            editor_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                            default_profile: None,
                            profiles: None,
                            always_allow_tool_actions: None,
                            notify_when_agent_waiting: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
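
    // A minimal sketch of the legacy upgrade path (field values are illustrative):
    // un-versioned settings should be reported as outdated and should upgrade to a
    // V2 document whose default model targets the "openai" provider.
    #[test]
    fn test_legacy_settings_are_reported_outdated_and_upgraded() {
        let legacy = AssistantSettingsContent::Legacy(LegacyAssistantSettingsContent {
            button: Some(true),
            dock: Some(AssistantDockPosition::Right),
            default_width: Some(640.),
            default_height: Some(320.),
            default_open_ai_model: None,
            openai_api_url: None,
        });

        assert!(legacy.is_version_outdated());

        let upgraded = legacy.upgrade();
        assert_eq!(
            upgraded.default_model.map(|selection| selection.provider),
            Some("openai".to_string())
        );
    }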
}