mod agent_profile;

use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use deepseek::Model as DeepseekModel;
use feature_flags::FeatureFlagAppExt;
use gpui::{App, Pixels};
use indexmap::IndexMap;
use language_model::{CloudModel, LanguageModel};
use lmstudio::Model as LmStudioModel;
use ollama::Model as OllamaModel;
use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

pub use crate::agent_profile::*;

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}

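// Illustrative sketch only: under the V1 layout, the `provider` object in settings.json
// is internally tagged by `name` (per `#[serde(tag = "name")]` above), so a configured
// provider looks roughly like the snippet below. The URL value is a placeholder, and the
// exact shape of `default_model` depends on the provider's model type.
//
//     "provider": {
//         "name": "openai",
//         "api_url": "https://api.openai.com/v1",
//         ...
//     }
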
#[derive(Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub editor_model: LanguageModelSelection,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
    pub profiles: IndexMap<Arc<str>, AgentProfile>,
}

impl AssistantSettings {
    pub fn are_live_diffs_enabled(&self, cx: &App) -> bool {
        cx.is_staff() || self.enable_experimental_live_diffs
    }
}

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}

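// Because the enum is `#[serde(untagged)]`, serde tries the variants in declaration
// order: content carrying a `"version"` tag deserializes as `Versioned`, and the old
// un-versioned shape falls back to `Legacy`. The `JsonSchema` impl below only
// advertises the versioned shape.
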
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl Default for AssistantSettingsContent {
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}

impl AssistantSettingsContent {
    pub fn is_version_outdated(&self) -> bool {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            AssistantSettingsContent::Legacy(_) => true,
        }
    }

    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::LmStudio { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "lmstudio".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::DeepSeek { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "deepseek".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                    editor_model: None,
                    inline_alternatives: None,
                    enable_experimental_live_diffs: None,
                    profiles: None,
                },
                VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
            },
            AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".to_string(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
                editor_model: None,
                inline_alternatives: None,
                enable_experimental_live_diffs: None,
                profiles: None,
            },
        }
    }

    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.dock = Some(dock);
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                settings.dock = Some(dock);
            }
        }
    }

    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
                    "zed.dev" => {
                        log::warn!("attempted to set zed.dev model on outdated settings");
                    }
                    "anthropic" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Anthropic {
                            default_model: AnthropicModel::from_id(&model).ok(),
                            api_url,
                        });
                    }
                    "ollama" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Ollama {
                            default_model: Some(ollama::Model::new(&model, None, None)),
                            api_url,
                        });
                    }
                    "lmstudio" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::LmStudio {
                            default_model: Some(lmstudio::Model::new(&model, None, None)),
                            api_url,
                        });
                    }
                    "openai" => {
                        let (api_url, available_models) = match &settings.provider {
                            Some(AssistantProviderContentV1::OpenAi {
                                api_url,
                                available_models,
                                ..
                            }) => (api_url.clone(), available_models.clone()),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::OpenAi {
                            default_model: OpenAiModel::from_id(&model).ok(),
                            api_url,
                            available_models,
                        });
                    }
                    "deepseek" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                            default_model: DeepseekModel::from_id(&model).ok(),
                            api_url,
                        });
                    }
                    _ => {}
                },
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.default_model = Some(LanguageModelSelection { provider, model });
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
        }
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

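// Sketch of how the version tag appears in settings.json (other fields elided; the
// model shown is the one asserted in the test at the bottom of this file):
//
//     "assistant": {
//         "version": "2",
//         "default_model": { "provider": "zed.dev", "model": "claude-3-5-sonnet-latest" }
//     }
//
// The string value of `version` selects V1 or V2 above; `version` is also listed in
// `PRESERVED_KEYS` in the `Settings` impl below.
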
impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            editor_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
            profiles: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// The model to use when applying edits from the assistant.
    editor_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    #[schemars(skip)]
    profiles: Option<IndexMap<Arc<str>, AgentProfileContent>>,
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}

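// In settings.json a selection is a small object; the values below are illustrative
// (the provider names accepted by the schema are enumerated in `providers_schema`):
//
//     "default_model": { "provider": "zed.dev", "model": "claude-3-5-sonnet-latest" }
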
fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
    schemars::schema::SchemaObject {
        enum_values: Some(vec![
            "anthropic".into(),
            "bedrock".into(),
            "google".into(),
            "lmstudio".into(),
            "ollama".into(),
            "openai".into(),
            "zed.dev".into(),
            "copilot_chat".into(),
            "deepseek".into(),
        ]),
        ..Default::default()
    }
    .into()
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    pub tools: IndexMap<Arc<str>, bool>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", or "zed.dev",
    /// each with its own default model and configuration.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}

impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.editor_model, value.editor_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );

            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                context_servers: IndexMap::default(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }
}

fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            editor_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                            profiles: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}