use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use fs::Fs;
use gpui::{AppContext, Pixels};
use language_model::provider::open_ai;
use language_model::settings::{
    AnthropicSettingsContent, AnthropicSettingsContentV1, OllamaSettingsContent,
    OpenAiSettingsContent, OpenAiSettingsContentV1, VersionedAnthropicSettingsContent,
    VersionedOpenAiSettingsContent,
};
use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
use ollama::Model as OllamaModel;
use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{update_settings_file, Settings, SettingsSources};

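/// The edge of the workspace at which the assistant panel is docked.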
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

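/// Provider configuration as it appeared in version 1 of the assistant
/// settings. Kept so that older settings files can be migrated into the
/// per-provider `AllLanguageModelSettings` by `update_file`.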
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
}

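/// The resolved assistant settings, produced by merging the default and
/// user settings files (see the `Settings` implementation below).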
#[derive(Debug, Default)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub using_outdated_settings_version: bool,
}

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}

impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl Default for AssistantSettingsContent {
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}

impl AssistantSettingsContent {
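    /// Returns true if these settings were written in a format older than V2
    /// and should be migrated.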
    pub fn is_version_outdated(&self) -> bool {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            AssistantSettingsContent::Legacy(_) => true,
        }
    }

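    /// Migrates provider-specific configuration from V1 settings into
    /// `AllLanguageModelSettings` in the user's settings file, then upgrades
    /// `self` to the V2 format in place.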
    pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
        if let AssistantSettingsContent::Versioned(settings) = self {
            if let VersionedAssistantSettingsContent::V1(settings) = settings {
                if let Some(provider) = settings.provider.clone() {
                    match provider {
                        AssistantProviderContentV1::Anthropic {
                            api_url,
                            low_speed_timeout_in_seconds,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.anthropic.is_none() {
                                    content.anthropic = Some(AnthropicSettingsContent::Versioned(
                                        VersionedAnthropicSettingsContent::V1(
                                            AnthropicSettingsContentV1 {
                                                api_url,
                                                low_speed_timeout_in_seconds,
                                                available_models: None,
                                            },
                                        ),
                                    ));
                                }
                            },
                        ),
                        AssistantProviderContentV1::Ollama {
                            api_url,
                            low_speed_timeout_in_seconds,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.ollama.is_none() {
                                    content.ollama = Some(OllamaSettingsContent {
                                        api_url,
                                        low_speed_timeout_in_seconds,
                                        available_models: None,
                                    });
                                }
                            },
                        ),
                        AssistantProviderContentV1::OpenAi {
                            api_url,
                            low_speed_timeout_in_seconds,
                            available_models,
                            ..
                        } => update_settings_file::<AllLanguageModelSettings>(
                            fs,
                            cx,
                            move |content, _| {
                                if content.openai.is_none() {
                                    let available_models = available_models.map(|models| {
                                        models
                                            .into_iter()
                                            .filter_map(|model| match model {
                                                OpenAiModel::Custom {
                                                    name,
                                                    display_name,
                                                    max_tokens,
                                                    max_output_tokens,
                                                } => Some(open_ai::AvailableModel {
                                                    name,
                                                    display_name,
                                                    max_tokens,
                                                    max_output_tokens,
                                                }),
                                                _ => None,
                                            })
                                            .collect::<Vec<_>>()
                                    });
                                    content.openai = Some(OpenAiSettingsContent::Versioned(
                                        VersionedOpenAiSettingsContent::V1(
                                            OpenAiSettingsContentV1 {
                                                api_url,
                                                low_speed_timeout_in_seconds,
                                                available_models,
                                            },
                                        ),
                                    ));
                                }
                            },
                        ),
                        _ => {}
                    }
                }
            }
        }

        *self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
            self.upgrade(),
        ));
    }

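    /// Converts any supported settings format into the latest (V2) content,
    /// mapping legacy provider/model fields onto a `LanguageModelSelection`.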
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".to_string(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                },
                VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
            },
            AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".to_string(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
            },
        }
    }

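    /// Sets the dock position, regardless of which settings version is in use.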
    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.dock = Some(dock);
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                settings.dock = Some(dock);
            }
        }
    }

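    /// Sets the default language model. For V1 and legacy settings this is
    /// translated into the corresponding provider-specific field; for V2 it
    /// becomes the `default_model` selection.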
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match self {
            AssistantSettingsContent::Versioned(settings) => match settings {
                VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
                    "zed.dev" => {
                        log::warn!("attempted to set zed.dev model on outdated settings");
                    }
                    "anthropic" => {
                        let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
                            Some(AssistantProviderContentV1::Anthropic {
                                api_url,
                                low_speed_timeout_in_seconds,
                                ..
                            }) => (api_url.clone(), *low_speed_timeout_in_seconds),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::Anthropic {
                            default_model: AnthropicModel::from_id(&model).ok(),
                            api_url,
                            low_speed_timeout_in_seconds,
                        });
                    }
                    "ollama" => {
                        let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
                            Some(AssistantProviderContentV1::Ollama {
                                api_url,
                                low_speed_timeout_in_seconds,
                                ..
                            }) => (api_url.clone(), *low_speed_timeout_in_seconds),
                            _ => (None, None),
                        };
                        settings.provider = Some(AssistantProviderContentV1::Ollama {
                            default_model: Some(ollama::Model::new(&model, None, None)),
                            api_url,
                            low_speed_timeout_in_seconds,
                        });
                    }
                    "openai" => {
                        let (api_url, low_speed_timeout_in_seconds, available_models) =
                            match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    low_speed_timeout_in_seconds,
                                    available_models,
                                    ..
                                }) => (
                                    api_url.clone(),
                                    *low_speed_timeout_in_seconds,
                                    available_models.clone(),
                                ),
                                _ => (None, None, None),
                            };
                        settings.provider = Some(AssistantProviderContentV1::OpenAi {
                            default_model: OpenAiModel::from_id(&model).ok(),
                            api_url,
                            low_speed_timeout_in_seconds,
                            available_models,
                        });
                    }
                    _ => {}
                },
                VersionedAssistantSettingsContent::V2(settings) => {
                    settings.default_model = Some(LanguageModelSelection { provider, model });
                }
            },
            AssistantSettingsContent::Legacy(settings) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
        }
    }
}

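/// Assistant settings content tagged with an explicit `"version"` field.
///
/// For example, a V2 entry in the settings file might look like this
/// (illustrative values, matching the field names defined below):
///
/// ```json
/// {
///   "assistant": {
///     "version": "2",
///     "default_model": { "provider": "zed.dev", "model": "claude-3-5-sonnet" }
///   }
/// }
/// ```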
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new contexts.
    default_model: Option<LanguageModelSelection>,
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    pub model: String,
}

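/// JSON-schema override for `LanguageModelSelection::provider`, restricting
/// the value to the set of known provider names.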
fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
    schemars::schema::SchemaObject {
        enum_values: Some(vec![
            "anthropic".into(),
            "google".into(),
            "ollama".into(),
            "openai".into(),
            "zed.dev".into(),
            "copilot_chat".into(),
        ]),
        ..Default::default()
    }
    .into()
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: "openai".to_string(),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", or "zed.dev",
    /// each with its own default model and configuration.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new contexts.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new contexts.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}

impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("assistant");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(
                &mut settings.default_model,
                value.default_model.map(Into::into),
            );
        }

        Ok(settings)
    }
}

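/// Overwrites `target` with `value` when a customization provides one,
/// leaving the existing default in place otherwise.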
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}