mod agent_profile;

use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use deepseek::Model as DeepseekModel;
use feature_flags::FeatureFlagAppExt;
use gpui::{App, Pixels};
use indexmap::IndexMap;
use language_model::{CloudModel, LanguageModel};
use lmstudio::Model as LmStudioModel;
use ollama::Model as OllamaModel;
use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

pub use crate::agent_profile::*;

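/// Docked position of the assistant panel.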
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

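/// Per-provider configuration as it appeared in version 1 of the assistant
/// settings. Kept so older settings files can still be deserialized and
/// upgraded in `AssistantSettingsContent::upgrade`.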
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}

#[derive(Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub editor_model: LanguageModelSelection,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
    pub default_profile: Arc<str>,
    pub profiles: IndexMap<Arc<str>, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: bool,
}

impl AssistantSettings {
    pub fn are_live_diffs_enabled(&self, cx: &App) -> bool {
        cx.is_staff() || self.enable_experimental_live_diffs
    }
}

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}

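// The JSON schema is delegated to the versioned representation: the legacy
// shape is still accepted when deserializing (via the untagged enum above),
// but only the versioned form is advertised to schema consumers.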
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl Default for AssistantSettingsContent {
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}

impl AssistantSettingsContent {
    pub fn is_version_outdated(&self) -> bool {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            AssistantSettingsContent::Legacy(_) => true,
        }
    }

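    /// Converts whatever shape was on disk (legacy or V1) into the latest V2
    /// content, mapping the old per-provider default model onto a
    /// `LanguageModelSelection`. Fields that did not exist before V2 are left
    /// as `None` so the defaults apply.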
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::LmStudio { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "lmstudio".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::DeepSeek { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "deepseek".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                    editor_model: None,
                    inline_alternatives: None,
                    enable_experimental_live_diffs: None,
                    default_profile: None,
                    profiles: None,
                    always_allow_tool_actions: None,
                    notify_when_agent_waiting: None,
                },
                VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
            },
            AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".to_string(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
                editor_model: None,
                inline_alternatives: None,
                enable_experimental_live_diffs: None,
                default_profile: None,
                profiles: None,
                always_allow_tool_actions: None,
                notify_when_agent_waiting: None,
            },
        }
    }

    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.dock = Some(dock);
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                settings.dock = Some(dock);
            }
        }
    }

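    /// Records `language_model` as the default model in whatever settings
    /// version is currently on disk. For V1 settings this rewrites the
    /// provider entry while preserving any configured API URL (and, for
    /// OpenAI, the list of available models); for V2 it simply updates
    /// `default_model`. Legacy settings only accept OpenAI model ids.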
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
                    "zed.dev" => {
                        log::warn!("attempted to set zed.dev model on outdated settings");
                    }
                    "anthropic" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Anthropic {
                            default_model: AnthropicModel::from_id(&model).ok(),
                            api_url,
                        });
                    }
                    "ollama" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Ollama {
                            default_model: Some(ollama::Model::new(&model, None, None)),
                            api_url,
                        });
                    }
                    "lmstudio" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::LmStudio {
                            default_model: Some(lmstudio::Model::new(&model, None, None)),
                            api_url,
                        });
                    }
                    "openai" => {
                        let (api_url, available_models) = match &settings.provider {
                            Some(AssistantProviderContentV1::OpenAi {
                                api_url,
                                available_models,
                                ..
                            }) => (api_url.clone(), available_models.clone()),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::OpenAi {
                            default_model: OpenAiModel::from_id(&model).ok(),
                            api_url,
                            available_models,
                        });
                    }
                    "deepseek" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                            default_model: DeepseekModel::from_id(&model).ok(),
                            api_url,
                        });
                    }
                    _ => {}
                },
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.default_model = Some(LanguageModelSelection { provider, model });
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
        }
    }

    pub fn set_profile(&mut self, profile_id: Arc<str>) {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.default_profile = Some(profile_id);
                }
                VersionedAssistantSettingsContent::V1(_) => {}
            },
            AssistantSettingsContent::Legacy(_) => {}
        }
    }
}

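/// Settings content tagged by a `"version"` field in the settings file, so
/// the loader can tell which schema a given settings blob was written against.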
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            editor_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
            default_profile: None,
            profiles: None,
            always_allow_tool_actions: None,
            notify_when_agent_waiting: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// The model to use when applying edits from the assistant.
    editor_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    #[schemars(skip)]
    default_profile: Option<Arc<str>>,
    #[schemars(skip)]
    pub profiles: Option<IndexMap<Arc<str>, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Whether to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: true
    notify_when_agent_waiting: Option<bool>,
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}

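// Restricts the `provider` field to the known provider identifiers in the
// generated JSON schema.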
fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
    schemars::schema::SchemaObject {
        enum_values: Some(vec![
            "anthropic".into(),
            "bedrock".into(),
            "google".into(),
            "lmstudio".into(),
            "ollama".into(),
            "openai".into(),
            "zed.dev".into(),
            "copilot_chat".into(),
            "deepseek".into(),
        ]),
        ..Default::default()
    }
    .into()
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            model: "gpt-4".to_string(),
        }
    }
}

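/// Serialized form of an agent profile: a named set of tool toggles plus
/// per-context-server tool presets.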
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    pub tools: IndexMap<Arc<str>, bool>,
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}

#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", or "zed.dev",
    /// each with its respective default model and configuration.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}

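// Settings are loaded by layering every source from
// `defaults_and_customizations()` over `AssistantSettings::default()`. Each
// layer is upgraded to the V2 shape before merging, and any layer still in an
// older shape sets `using_outdated_settings_version`.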
impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.editor_model, value.editor_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.default_profile, value.default_profile);

            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }
}

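/// Overwrites `target` only when the layered value is present; `None` leaves
/// the previously merged (or default) value in place.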
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            editor_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                            default_profile: None,
                            profiles: None,
                            always_allow_tool_actions: None,
                            notify_when_agent_waiting: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}