use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use feature_flags::FeatureFlagAppExt;
use fs::Fs;
use gpui::{AppContext, Pixels};
use language_model::{CloudModel, LanguageModel};
use language_models::{
    provider::open_ai, AllLanguageModelSettings, AnthropicSettingsContent,
    AnthropicSettingsContentV1, OllamaSettingsContent, OpenAiSettingsContent,
    OpenAiSettingsContentV1, VersionedAnthropicSettingsContent, VersionedOpenAiSettingsContent,
};
use ollama::Model as OllamaModel;
use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
}
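// Illustrative sketch only (not a shipped default): in the outdated V1 schema the provider and
// its model were configured inline in the `assistant` block of `settings.json`, roughly:
//
//     "assistant": {
//         "version": "1",
//         "provider": {
//             "name": "openai",
//             "default_model": "gpt-4",
//             "api_url": "https://api.openai.com/v1"
//         }
//     }
//
// The `api_url` shown is the documented OpenAI default; the model id is only an example.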
#[derive(Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
}

impl AssistantSettings {
    pub fn are_live_diffs_enabled(&self, cx: &AppContext) -> bool {
        cx.is_staff() || self.enable_experimental_live_diffs
    }
}

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}

impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl Default for AssistantSettingsContent {
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}

impl AssistantSettingsContent {
    pub fn is_version_outdated(&self) -> bool {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            AssistantSettingsContent::Legacy(_) => true,
        }
    }

    pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
        if let AssistantSettingsContent::Versioned(settings) = self {
            if let VersionedAssistantSettingsContent::V1(settings) = settings {
                if let Some(provider) = settings.provider.clone() {
                    match provider {
                        AssistantProviderContentV1::Anthropic { api_url, .. } => {
                            update_settings_file::<AllLanguageModelSettings>(
                                fs,
                                cx,
                                move |content, _| {
                                    if content.anthropic.is_none() {
                                        content.anthropic =
                                            Some(AnthropicSettingsContent::Versioned(
                                                VersionedAnthropicSettingsContent::V1(
                                                    AnthropicSettingsContentV1 {
                                                        api_url,
                                                        available_models: None,
                                                    },
                                                ),
                                            ));
                                    }
                                },
                            )
                        }
                        AssistantProviderContentV1::Ollama { api_url, .. } => {
                            update_settings_file::<AllLanguageModelSettings>(
                                fs,
                                cx,
                                move |content, _| {
                                    if content.ollama.is_none() {
                                        content.ollama = Some(OllamaSettingsContent {
                                            api_url,
                                            available_models: None,
                                        });
                                    }
                                },
                            )
                        }
                        AssistantProviderContentV1::OpenAi {
                            api_url,
                            available_models,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.openai.is_none() {
                                    let available_models = available_models.map(|models| {
                                        models
                                            .into_iter()
                                            .filter_map(|model| match model {
                                                OpenAiModel::Custom {
                                                    name,
                                                    display_name,
                                                    max_tokens,
                                                    max_output_tokens,
                                                    max_completion_tokens,
                                                } => Some(open_ai::AvailableModel {
                                                    name,
                                                    display_name,
                                                    max_tokens,
                                                    max_output_tokens,
                                                    max_completion_tokens,
                                                }),
                                                _ => None,
                                            })
                                            .collect::<Vec<_>>()
                                    });
                                    content.openai = Some(OpenAiSettingsContent::Versioned(
                                        VersionedOpenAiSettingsContent::V1(
                                            OpenAiSettingsContentV1 {
                                                api_url,
                                                available_models,
                                            },
                                        ),
                                    ));
                                }
                            },
                        ),
                        _ => {}
                    }
                }
            }
        }

        *self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
            self.upgrade(),
        ));
    }
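    // Migration sketch (values are illustrative): given a V1 block such as
    //
    //     "assistant": { "version": "1", "provider": { "name": "openai", "api_url": "https://api.openai.com/v1" } }
    //
    // `update_file` copies the provider-specific fields into `AllLanguageModelSettings`
    // (here `content.openai`, and only when that entry is not already set), then replaces
    // `self` with the V2 form produced by `upgrade` below.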
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                    inline_alternatives: None,
                    enable_experimental_live_diffs: None,
                },
                VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
            },
            AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".to_string(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
                inline_alternatives: None,
                enable_experimental_live_diffs: None,
            },
        }
    }
    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.dock = Some(dock);
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                settings.dock = Some(dock);
            }
        }
    }

    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
                    "zed.dev" => {
                        log::warn!("attempted to set zed.dev model on outdated settings");
                    }
                    "anthropic" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Anthropic {
                            default_model: AnthropicModel::from_id(&model).ok(),
                            api_url,
                        });
                    }
                    "ollama" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Ollama {
                            default_model: Some(ollama::Model::new(&model, None, None)),
                            api_url,
                        });
                    }
                    "openai" => {
                        let (api_url, available_models) = match &settings.provider {
                            Some(AssistantProviderContentV1::OpenAi {
                                api_url,
                                available_models,
                                ..
                            }) => (api_url.clone(), available_models.clone()),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::OpenAi {
                            default_model: OpenAiModel::from_id(&model).ok(),
                            api_url,
                            available_models,
                        });
                    }
                    _ => {}
                },
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.default_model = Some(LanguageModelSelection { provider, model });
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
        }
    }
}
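// Sketch of what `set_model` ends up recording (example values taken from the defaults and
// tests in this file, not guarantees): for a V2 block it simply stores
//
//     "default_model": { "provider": "zed.dev", "model": "claude-3-5-sonnet" }
//
// while for an outdated V1 block it rewrites the matching `provider` variant instead, and a
// zed.dev model cannot be stored at all (only a warning is logged).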
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
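// Because of the internal `version` tag, `{ "version": "1", ... }` deserializes as `V1` and
// `{ "version": "2", ... }` as `V2`, while a block with no `version` key falls through the
// untagged `AssistantSettingsContent` enum to its `Legacy` variant instead.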
impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
}
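// Illustrative `settings.json` fragment for the V2 schema above; the "right" and 640 values
// come from the doc comments, the model selection from this file's defaults:
//
//     "assistant": {
//         "version": "2",
//         "enabled": true,
//         "button": true,
//         "dock": "right",
//         "default_width": 640,
//         "default_model": { "provider": "openai", "model": "gpt-4" }
//     }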
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}

fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
    schemars::schema::SchemaObject {
        enum_values: Some(vec![
            "anthropic".into(),
            "google".into(),
            "ollama".into(),
            "openai".into(),
            "zed.dev".into(),
            "copilot_chat".into(),
        ]),
        ..Default::default()
    }
    .into()
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", or "zed.dev", each with
    /// its respective default models and configuration.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}
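// Illustrative sketch of the legacy (pre-versioned) shape, using the documented defaults; any
// `assistant` block without a `version` key is parsed as this form:
//
//     "assistant": {
//         "button": true,
//         "dock": "right",
//         "default_width": 640,
//         "default_open_ai_model": "gpt-4-1106-preview",
//         "openai_api_url": "https://api.openai.com/v1"
//     }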
impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
        }

        Ok(settings)
    }
}
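// Rough reading of `load` (ordering assumed from `defaults_and_customizations`): every source is
// upgraded to V2 and merged field by field, so a later user file that sets, say, `"dock": "bottom"`
// overrides the default while any field it leaves out keeps the previously merged value.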
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}