1use std::sync::Arc;
2
3use ::open_ai::Model as OpenAiModel;
4use anthropic::Model as AnthropicModel;
5use feature_flags::FeatureFlagAppExt;
6use fs::Fs;
7use gpui::{AppContext, Pixels};
8use language_model::provider::open_ai;
9use language_model::settings::{
10 AnthropicSettingsContent, AnthropicSettingsContentV1, OllamaSettingsContent,
11 OpenAiSettingsContent, OpenAiSettingsContentV1, VersionedAnthropicSettingsContent,
12 VersionedOpenAiSettingsContent,
13};
14use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
15use ollama::Model as OllamaModel;
16use schemars::{schema::Schema, JsonSchema};
17use serde::{Deserialize, Serialize};
18use settings::{update_settings_file, Settings, SettingsSources};
19
/// Which edge of the workspace the assistant panel is docked to.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    /// Docked to the left edge.
    Left,
    /// Docked to the right edge (the default).
    #[default]
    Right,
    /// Docked to the bottom edge.
    Bottom,
}
28
/// Provider configuration embedded in V1 assistant settings.
///
/// In V2 this data lives in the language-model settings instead; this type
/// only exists so V1 documents can be deserialized and migrated (see
/// `AssistantSettingsContent::update_file`). The serde `tag = "name"` layout
/// means the JSON looks like `{ "name": "openai", ... }`.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    /// Zed's hosted models; only a default model is configurable.
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    /// Direct OpenAI API access.
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    /// Direct Anthropic API access.
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
    /// Local models served by Ollama.
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
}
54
/// Fully-resolved assistant settings, produced by merging all settings
/// layers in `<AssistantSettings as Settings>::load`. Unlike the
/// `*Content*` types, every field here has a concrete value.
#[derive(Debug, Default)]
pub struct AssistantSettings {
    // Whether the assistant feature is enabled at all.
    pub enabled: bool,
    // Whether the assistant button is shown in the status bar.
    pub button: bool,
    // Where the assistant panel is docked.
    pub dock: AssistantDockPosition,
    // Panel width (used when docked left/right) in pixels.
    pub default_width: Pixels,
    // Panel height (used when docked at the bottom) in pixels.
    pub default_height: Pixels,
    // Model used for new chats.
    pub default_model: LanguageModelSelection,
    // Extra models used to generate alternatives for inline assists.
    pub inline_alternatives: Vec<LanguageModelSelection>,
    // True when any settings layer was written in a pre-V2 schema;
    // used to prompt the user to migrate.
    pub using_outdated_settings_version: bool,
    // Opt-in flag for the experimental live-diff UI.
    pub enable_experimental_live_diffs: bool,
}
67
impl AssistantSettings {
    /// Live diffs are available to staff unconditionally, and to everyone
    /// else only when the experimental setting is turned on.
    pub fn are_live_diffs_enabled(&self, cx: &AppContext) -> bool {
        cx.is_staff() || self.enable_experimental_live_diffs
    }
}
73
/// Assistant panel settings as they appear on disk.
///
/// `#[serde(untagged)]` tries each variant in order: documents with a
/// `"version"` field parse as `Versioned`; anything else falls back to the
/// pre-versioning `Legacy` shape.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}
81
/// Hand-written `JsonSchema` impl that delegates entirely to the versioned
/// representation, so the generated settings schema only advertises the
/// current format and never the legacy one.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
95
impl Default for AssistantSettingsContent {
    /// Defaults to the current versioned (V2) representation with all
    /// fields unset.
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}
101
102impl AssistantSettingsContent {
103 pub fn is_version_outdated(&self) -> bool {
104 match self {
105 AssistantSettingsContent::Versioned(settings) => match settings {
106 VersionedAssistantSettingsContent::V1(_) => true,
107 VersionedAssistantSettingsContent::V2(_) => false,
108 },
109 AssistantSettingsContent::Legacy(_) => true,
110 }
111 }
112
113 pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
114 if let AssistantSettingsContent::Versioned(settings) = self {
115 if let VersionedAssistantSettingsContent::V1(settings) = settings {
116 if let Some(provider) = settings.provider.clone() {
117 match provider {
118 AssistantProviderContentV1::Anthropic {
119 api_url,
120 low_speed_timeout_in_seconds,
121 ..
122 } => update_settings_file::<AllLanguageModelSettings>(
123 fs,
124 cx,
125 move |content, _| {
126 if content.anthropic.is_none() {
127 content.anthropic = Some(AnthropicSettingsContent::Versioned(
128 VersionedAnthropicSettingsContent::V1(
129 AnthropicSettingsContentV1 {
130 api_url,
131 low_speed_timeout_in_seconds,
132 available_models: None,
133 },
134 ),
135 ));
136 }
137 },
138 ),
139 AssistantProviderContentV1::Ollama {
140 api_url,
141 low_speed_timeout_in_seconds,
142 ..
143 } => update_settings_file::<AllLanguageModelSettings>(
144 fs,
145 cx,
146 move |content, _| {
147 if content.ollama.is_none() {
148 content.ollama = Some(OllamaSettingsContent {
149 api_url,
150 low_speed_timeout_in_seconds,
151 available_models: None,
152 });
153 }
154 },
155 ),
156 AssistantProviderContentV1::OpenAi {
157 api_url,
158 low_speed_timeout_in_seconds,
159 available_models,
160 ..
161 } => update_settings_file::<AllLanguageModelSettings>(
162 fs,
163 cx,
164 move |content, _| {
165 if content.openai.is_none() {
166 let available_models = available_models.map(|models| {
167 models
168 .into_iter()
169 .filter_map(|model| match model {
170 OpenAiModel::Custom {
171 name,
172 display_name,
173 max_tokens,
174 max_output_tokens,
175 max_completion_tokens: None,
176 } => Some(open_ai::AvailableModel {
177 name,
178 display_name,
179 max_tokens,
180 max_output_tokens,
181 max_completion_tokens: None,
182 }),
183 _ => None,
184 })
185 .collect::<Vec<_>>()
186 });
187 content.openai = Some(OpenAiSettingsContent::Versioned(
188 VersionedOpenAiSettingsContent::V1(
189 OpenAiSettingsContentV1 {
190 api_url,
191 low_speed_timeout_in_seconds,
192 available_models,
193 },
194 ),
195 ));
196 }
197 },
198 ),
199 _ => {}
200 }
201 }
202 }
203 }
204
205 *self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
206 self.upgrade(),
207 ));
208 }
209
210 fn upgrade(&self) -> AssistantSettingsContentV2 {
211 match self {
212 AssistantSettingsContent::Versioned(settings) => match settings {
213 VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
214 enabled: settings.enabled,
215 button: settings.button,
216 dock: settings.dock,
217 default_width: settings.default_width,
218 default_height: settings.default_width,
219 default_model: settings
220 .provider
221 .clone()
222 .and_then(|provider| match provider {
223 AssistantProviderContentV1::ZedDotDev { default_model } => {
224 default_model.map(|model| LanguageModelSelection {
225 provider: "zed.dev".to_string(),
226 model: model.id().to_string(),
227 })
228 }
229 AssistantProviderContentV1::OpenAi { default_model, .. } => {
230 default_model.map(|model| LanguageModelSelection {
231 provider: "openai".to_string(),
232 model: model.id().to_string(),
233 })
234 }
235 AssistantProviderContentV1::Anthropic { default_model, .. } => {
236 default_model.map(|model| LanguageModelSelection {
237 provider: "anthropic".to_string(),
238 model: model.id().to_string(),
239 })
240 }
241 AssistantProviderContentV1::Ollama { default_model, .. } => {
242 default_model.map(|model| LanguageModelSelection {
243 provider: "ollama".to_string(),
244 model: model.id().to_string(),
245 })
246 }
247 }),
248 inline_alternatives: None,
249 enable_experimental_live_diffs: None,
250 },
251 VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
252 },
253 AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
254 enabled: None,
255 button: settings.button,
256 dock: settings.dock,
257 default_width: settings.default_width,
258 default_height: settings.default_height,
259 default_model: Some(LanguageModelSelection {
260 provider: "openai".to_string(),
261 model: settings
262 .default_open_ai_model
263 .clone()
264 .unwrap_or_default()
265 .id()
266 .to_string(),
267 }),
268 inline_alternatives: None,
269 enable_experimental_live_diffs: None,
270 },
271 }
272 }
273
274 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
275 match self {
276 AssistantSettingsContent::Versioned(settings) => match settings {
277 VersionedAssistantSettingsContent::V1(settings) => {
278 settings.dock = Some(dock);
279 }
280 VersionedAssistantSettingsContent::V2(settings) => {
281 settings.dock = Some(dock);
282 }
283 },
284 AssistantSettingsContent::Legacy(settings) => {
285 settings.dock = Some(dock);
286 }
287 }
288 }
289
290 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
291 let model = language_model.id().0.to_string();
292 let provider = language_model.provider_id().0.to_string();
293
294 match self {
295 AssistantSettingsContent::Versioned(settings) => match settings {
296 VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
297 "zed.dev" => {
298 log::warn!("attempted to set zed.dev model on outdated settings");
299 }
300 "anthropic" => {
301 let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
302 Some(AssistantProviderContentV1::Anthropic {
303 api_url,
304 low_speed_timeout_in_seconds,
305 ..
306 }) => (api_url.clone(), *low_speed_timeout_in_seconds),
307 _ => (None, None),
308 };
309 settings.provider = Some(AssistantProviderContentV1::Anthropic {
310 default_model: AnthropicModel::from_id(&model).ok(),
311 api_url,
312 low_speed_timeout_in_seconds,
313 });
314 }
315 "ollama" => {
316 let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
317 Some(AssistantProviderContentV1::Ollama {
318 api_url,
319 low_speed_timeout_in_seconds,
320 ..
321 }) => (api_url.clone(), *low_speed_timeout_in_seconds),
322 _ => (None, None),
323 };
324 settings.provider = Some(AssistantProviderContentV1::Ollama {
325 default_model: Some(ollama::Model::new(&model, None, None)),
326 api_url,
327 low_speed_timeout_in_seconds,
328 });
329 }
330 "openai" => {
331 let (api_url, low_speed_timeout_in_seconds, available_models) =
332 match &settings.provider {
333 Some(AssistantProviderContentV1::OpenAi {
334 api_url,
335 low_speed_timeout_in_seconds,
336 available_models,
337 ..
338 }) => (
339 api_url.clone(),
340 *low_speed_timeout_in_seconds,
341 available_models.clone(),
342 ),
343 _ => (None, None, None),
344 };
345 settings.provider = Some(AssistantProviderContentV1::OpenAi {
346 default_model: OpenAiModel::from_id(&model).ok(),
347 api_url,
348 low_speed_timeout_in_seconds,
349 available_models,
350 });
351 }
352 _ => {}
353 },
354 VersionedAssistantSettingsContent::V2(settings) => {
355 settings.default_model = Some(LanguageModelSelection { provider, model });
356 }
357 },
358 AssistantSettingsContent::Legacy(settings) => {
359 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
360 settings.default_open_ai_model = Some(model);
361 }
362 }
363 }
364 }
365}
366
/// The versioned on-disk settings formats, discriminated by a string
/// `"version"` field (`"1"` or `"2"`) so old documents keep deserializing.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
375
376impl Default for VersionedAssistantSettingsContent {
377 fn default() -> Self {
378 Self::V2(AssistantSettingsContentV2 {
379 enabled: None,
380 button: None,
381 dock: None,
382 default_width: None,
383 default_height: None,
384 default_model: None,
385 inline_alternatives: None,
386 enable_experimental_live_diffs: None,
387 })
388 }
389}
390
/// The current (V2) on-disk assistant settings. All fields are optional so a
/// user file may set any subset; unset fields inherit from lower-precedence
/// settings layers during merging.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
}
422
/// A (provider, model) pair identifying a language model, e.g.
/// `("anthropic", "claude-3-5-sonnet")`. The provider string is constrained
/// in the JSON schema by `providers_schema`.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}
429
430fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
431 schemars::schema::SchemaObject {
432 enum_values: Some(vec![
433 "anthropic".into(),
434 "google".into(),
435 "ollama".into(),
436 "openai".into(),
437 "zed.dev".into(),
438 "copilot_chat".into(),
439 ]),
440 ..Default::default()
441 }
442 .into()
443}
444
445impl Default for LanguageModelSelection {
446 fn default() -> Self {
447 Self {
448 provider: "openai".to_string(),
449 model: "gpt-4".to_string(),
450 }
451 }
452}
453
/// The V1 on-disk assistant settings, kept only for backward compatibility;
/// `AssistantSettingsContent::upgrade` converts it to V2.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
482
/// The oldest, unversioned assistant settings shape (OpenAI-only). Documents
/// without a `"version"` field deserialize into this via the untagged
/// `AssistantSettingsContent` enum.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}
510
511impl Settings for AssistantSettings {
512 const KEY: Option<&'static str> = Some("assistant");
513
514 const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
515
516 type FileContent = AssistantSettingsContent;
517
518 fn load(
519 sources: SettingsSources<Self::FileContent>,
520 _: &mut gpui::AppContext,
521 ) -> anyhow::Result<Self> {
522 let mut settings = AssistantSettings::default();
523
524 for value in sources.defaults_and_customizations() {
525 if value.is_version_outdated() {
526 settings.using_outdated_settings_version = true;
527 }
528
529 let value = value.upgrade();
530 merge(&mut settings.enabled, value.enabled);
531 merge(&mut settings.button, value.button);
532 merge(&mut settings.dock, value.dock);
533 merge(
534 &mut settings.default_width,
535 value.default_width.map(Into::into),
536 );
537 merge(
538 &mut settings.default_height,
539 value.default_height.map(Into::into),
540 );
541 merge(&mut settings.default_model, value.default_model);
542 merge(&mut settings.inline_alternatives, value.inline_alternatives);
543 merge(
544 &mut settings.enable_experimental_live_diffs,
545 value.enable_experimental_live_diffs,
546 );
547 }
548
549 Ok(settings)
550 }
551}
552
/// Overwrites `*target` with the contents of `value` when it is `Some`,
/// and leaves `*target` untouched otherwise.
fn merge<T>(target: &mut T, value: Option<T>) {
    match value {
        Some(value) => *target = value,
        None => {}
    }
}
558
#[cfg(test)]
mod tests {
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    /// End-to-end check that registered defaults are current-version, and
    /// that writing settings back to disk serializes them as a V2 document.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // The built-in defaults must already be on the current schema and
        // resolve to the expected default model.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        // Overwrite the settings file with an explicit V2 payload.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                            enable_experimental_live_diffs: None,
                        }),
                    )
                },
            );
        });

        // Let the async settings write complete before inspecting the file.
        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        // The serialized document must carry the version tag...
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        // ...and round-trip back as a non-outdated (V2) document.
        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}