use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use feature_flags::FeatureFlagAppExt;
use fs::Fs;
use gpui::{AppContext, Pixels};
use language_model::provider::open_ai;
use language_model::settings::{
    AnthropicSettingsContent, AnthropicSettingsContentV1, OllamaSettingsContent,
    OpenAiSettingsContent, OpenAiSettingsContentV1, VersionedAnthropicSettingsContent,
    VersionedOpenAiSettingsContent,
};
use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
use ollama::Model as OllamaModel;
use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

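/// Assistant provider configuration as written in version 1 settings files.
///
/// With `#[serde(tag = "name")]`, each variant is selected by a `"name"` field
/// in the settings JSON, e.g. (illustrative values only):
/// `{ "name": "openai", "api_url": "https://api.openai.com/v1" }`.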
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
}

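/// Assistant settings resolved from the built-in defaults and any user
/// customizations (see the `Settings` implementation below).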
#[derive(Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
}

impl AssistantSettings {
    pub fn are_live_diffs_enabled(&self, cx: &AppContext) -> bool {
        cx.is_staff() || self.enable_experimental_live_diffs
    }
}

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}

impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl Default for AssistantSettingsContent {
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}

impl AssistantSettingsContent {
    pub fn is_version_outdated(&self) -> bool {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            AssistantSettingsContent::Legacy(_) => true,
        }
    }

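    /// Migrates any V1 provider configuration into the corresponding
    /// `AllLanguageModelSettings` entries in the settings file, then rewrites
    /// `self` in the latest (V2) format.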
    pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
        if let AssistantSettingsContent::Versioned(settings) = self {
            if let VersionedAssistantSettingsContent::V1(settings) = settings {
                if let Some(provider) = settings.provider.clone() {
                    match provider {
                        AssistantProviderContentV1::Anthropic { api_url, .. } => {
                            update_settings_file::<AllLanguageModelSettings>(
                                fs,
                                cx,
                                move |content, _| {
                                    if content.anthropic.is_none() {
                                        content.anthropic =
                                            Some(AnthropicSettingsContent::Versioned(
                                                VersionedAnthropicSettingsContent::V1(
                                                    AnthropicSettingsContentV1 {
                                                        api_url,
                                                        available_models: None,
                                                    },
                                                ),
                                            ));
                                    }
                                },
                            )
                        }
                        AssistantProviderContentV1::Ollama { api_url, .. } => {
                            update_settings_file::<AllLanguageModelSettings>(
                                fs,
                                cx,
                                move |content, _| {
                                    if content.ollama.is_none() {
                                        content.ollama = Some(OllamaSettingsContent {
                                            api_url,
                                            available_models: None,
                                        });
                                    }
                                },
                            )
                        }
                        AssistantProviderContentV1::OpenAi {
                            api_url,
                            available_models,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.openai.is_none() {
                                    let available_models = available_models.map(|models| {
                                        models
                                            .into_iter()
                                            .filter_map(|model| match model {
                                                OpenAiModel::Custom {
                                                    name,
                                                    display_name,
                                                    max_tokens,
                                                    max_output_tokens,
                                                    max_completion_tokens,
                                                } => Some(open_ai::AvailableModel {
                                                    name,
                                                    display_name,
                                                    max_tokens,
                                                    max_output_tokens,
                                                    max_completion_tokens,
                                                }),
                                                _ => None,
                                            })
                                            .collect::<Vec<_>>()
                                    });
                                    content.openai = Some(OpenAiSettingsContent::Versioned(
                                        VersionedOpenAiSettingsContent::V1(
                                            OpenAiSettingsContentV1 {
                                                api_url,
                                                available_models,
                                            },
                                        ),
                                    ));
                                }
                            },
                        ),
                        _ => {}
                    }
                }
            }
        }

        *self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
            self.upgrade(),
        ));
    }

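    /// Converts these settings to the latest (V2) representation, mapping the
    /// legacy and V1 provider fields onto a `LanguageModelSelection`.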
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                    inline_alternatives: None,
                    enable_experimental_live_diffs: None,
                },
                VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
            },
            AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".to_string(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
                inline_alternatives: None,
                enable_experimental_live_diffs: None,
            },
        }
    }

    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.dock = Some(dock);
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                settings.dock = Some(dock);
            }
        }
    }

    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
                    "zed.dev" => {
                        log::warn!("attempted to set zed.dev model on outdated settings");
                    }
                    "anthropic" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Anthropic {
                            default_model: AnthropicModel::from_id(&model).ok(),
                            api_url,
                        });
                    }
                    "ollama" => {
                        let api_url = match &settings.provider {
                            Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                api_url.clone()
                            }
                            _ => None,
                        };
                        settings.provider = Some(AssistantProviderContentV1::Ollama {
                            default_model: Some(ollama::Model::new(&model, None, None)),
                            api_url,
                        });
                    }
                    "openai" => {
                        let (api_url, available_models) = match &settings.provider {
                            Some(AssistantProviderContentV1::OpenAi {
                                api_url,
                                available_models,
                                ..
                            }) => (api_url.clone(), available_models.clone()),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::OpenAi {
                            default_model: OpenAiModel::from_id(&model).ok(),
                            api_url,
                            available_models,
                        });
                    }
                    _ => {}
                },
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.default_model = Some(LanguageModelSelection { provider, model });
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
        }
    }
}

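/// Assistant settings content tagged by a `"version"` field in the settings
/// file (`"1"` or `"2"`).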
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}

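/// JSON schema for `LanguageModelSelection::provider`, restricted to the known
/// provider identifiers.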
fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
    schemars::schema::SchemaObject {
        enum_values: Some(vec![
            "anthropic".into(),
            "google".into(),
            "ollama".into(),
            "openai".into(),
            "zed.dev".into(),
            "copilot_chat".into(),
        ]),
        ..Default::default()
    }
    .into()
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", or "zed.dev", each with
    /// its own default model and configuration.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}

impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
                value.enable_experimental_live_diffs,
            );
        }

        Ok(settings)
    }
}

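/// Overwrites `target` whenever a settings layer supplies an explicit value.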
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
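
    // A minimal additional check (not part of the original test suite), sketching
    // the legacy -> V2 upgrade path directly: unversioned settings should be
    // reported as outdated and map onto the "openai" provider. Field values
    // below are arbitrary.
    #[test]
    fn test_legacy_settings_upgrade_to_v2() {
        let legacy = AssistantSettingsContent::Legacy(LegacyAssistantSettingsContent {
            button: Some(false),
            dock: Some(AssistantDockPosition::Bottom),
            default_width: Some(400.),
            default_height: Some(200.),
            default_open_ai_model: None,
            openai_api_url: None,
        });

        assert!(legacy.is_version_outdated());

        let upgraded = legacy.upgrade();
        assert_eq!(upgraded.button, Some(false));
        assert_eq!(upgraded.default_width, Some(400.));
        assert_eq!(upgraded.default_height, Some(200.));
        assert!(matches!(upgraded.dock, Some(AssistantDockPosition::Bottom)));
        assert_eq!(
            upgraded.default_model.map(|selection| selection.provider),
            Some("openai".to_string())
        );
    }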
}