1use std::sync::Arc;
2
3use ::open_ai::Model as OpenAiModel;
4use anthropic::Model as AnthropicModel;
5use fs::Fs;
6use gpui::{AppContext, Pixels};
7use language_model::provider::open_ai;
8use language_model::settings::{
9 AnthropicSettingsContent, AnthropicSettingsContentV1, OllamaSettingsContent,
10 OpenAiSettingsContent, OpenAiSettingsContentV1, VersionedAnthropicSettingsContent,
11 VersionedOpenAiSettingsContent,
12};
13use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
14use ollama::Model as OllamaModel;
15use schemars::{schema::Schema, JsonSchema};
16use serde::{Deserialize, Serialize};
17use settings::{update_settings_file, Settings, SettingsSources};
18
/// Where the assistant panel is docked in the workspace.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    /// Docked to the left edge.
    Left,
    /// Docked to the right edge (the default).
    #[default]
    Right,
    /// Docked to the bottom edge.
    Bottom,
}
27
/// Provider configuration as stored inline in V1 assistant settings.
///
/// Serialized internally tagged on `"name"`, e.g. `{ "name": "openai", ... }`.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    /// Zed's hosted models.
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    /// OpenAI-compatible API.
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    /// Anthropic's API.
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
    /// An Ollama server.
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
}
53
/// The resolved assistant settings, produced by merging every settings
/// source (defaults plus user customizations) in `Settings::load`.
#[derive(Debug, Default)]
pub struct AssistantSettings {
    /// Whether the assistant is enabled.
    pub enabled: bool,
    /// Whether the assistant panel button is shown in the status bar.
    pub button: bool,
    /// Where the assistant panel is docked.
    pub dock: AssistantDockPosition,
    /// Panel width when docked to the left or right.
    pub default_width: Pixels,
    /// Panel height when docked to the bottom.
    pub default_height: Pixels,
    /// The default model used when creating new contexts.
    pub default_model: LanguageModelSelection,
    /// True when any settings source was written in a pre-V2 format.
    pub using_outdated_settings_version: bool,
}
64
/// Assistant panel settings
///
/// Deserialized untagged: serde tries the variants in declaration order, so
/// versioned content (with a `"version"` key) is matched before the legacy,
/// unversioned format.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    /// Settings carrying an explicit `"version"` field.
    Versioned(VersionedAssistantSettingsContent),
    /// The original, unversioned settings format.
    Legacy(LegacyAssistantSettingsContent),
}
72
// The JSON schema is delegated entirely to `VersionedAssistantSettingsContent`
// so that generated schemas describe only the versioned format, steering users
// away from the legacy shape.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
86
impl Default for AssistantSettingsContent {
    /// Defaults to the latest versioned format with all fields unset.
    fn default() -> Self {
        Self::Versioned(VersionedAssistantSettingsContent::default())
    }
}
92
93impl AssistantSettingsContent {
94 pub fn is_version_outdated(&self) -> bool {
95 match self {
96 AssistantSettingsContent::Versioned(settings) => match settings {
97 VersionedAssistantSettingsContent::V1(_) => true,
98 VersionedAssistantSettingsContent::V2(_) => false,
99 },
100 AssistantSettingsContent::Legacy(_) => true,
101 }
102 }
103
104 pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
105 if let AssistantSettingsContent::Versioned(settings) = self {
106 if let VersionedAssistantSettingsContent::V1(settings) = settings {
107 if let Some(provider) = settings.provider.clone() {
108 match provider {
109 AssistantProviderContentV1::Anthropic {
110 api_url,
111 low_speed_timeout_in_seconds,
112 ..
113 } => update_settings_file::<AllLanguageModelSettings>(
114 fs,
115 cx,
116 move |content, _| {
117 if content.anthropic.is_none() {
118 content.anthropic = Some(AnthropicSettingsContent::Versioned(
119 VersionedAnthropicSettingsContent::V1(
120 AnthropicSettingsContentV1 {
121 api_url,
122 low_speed_timeout_in_seconds,
123 available_models: None,
124 },
125 ),
126 ));
127 }
128 },
129 ),
130 AssistantProviderContentV1::Ollama {
131 api_url,
132 low_speed_timeout_in_seconds,
133 ..
134 } => update_settings_file::<AllLanguageModelSettings>(
135 fs,
136 cx,
137 move |content, _| {
138 if content.ollama.is_none() {
139 content.ollama = Some(OllamaSettingsContent {
140 api_url,
141 low_speed_timeout_in_seconds,
142 available_models: None,
143 });
144 }
145 },
146 ),
147 AssistantProviderContentV1::OpenAi {
148 api_url,
149 low_speed_timeout_in_seconds,
150 available_models,
151 ..
152 } => update_settings_file::<AllLanguageModelSettings>(
153 fs,
154 cx,
155 move |content, _| {
156 if content.openai.is_none() {
157 let available_models = available_models.map(|models| {
158 models
159 .into_iter()
160 .filter_map(|model| match model {
161 OpenAiModel::Custom {
162 name,
163 display_name,
164 max_tokens,
165 max_output_tokens,
166 max_completion_tokens: None,
167 } => Some(open_ai::AvailableModel {
168 name,
169 display_name,
170 max_tokens,
171 max_output_tokens,
172 max_completion_tokens: None,
173 }),
174 _ => None,
175 })
176 .collect::<Vec<_>>()
177 });
178 content.openai = Some(OpenAiSettingsContent::Versioned(
179 VersionedOpenAiSettingsContent::V1(
180 OpenAiSettingsContentV1 {
181 api_url,
182 low_speed_timeout_in_seconds,
183 available_models,
184 },
185 ),
186 ));
187 }
188 },
189 ),
190 _ => {}
191 }
192 }
193 }
194 }
195
196 *self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
197 self.upgrade(),
198 ));
199 }
200
201 fn upgrade(&self) -> AssistantSettingsContentV2 {
202 match self {
203 AssistantSettingsContent::Versioned(settings) => match settings {
204 VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
205 enabled: settings.enabled,
206 button: settings.button,
207 dock: settings.dock,
208 default_width: settings.default_width,
209 default_height: settings.default_width,
210 default_model: settings
211 .provider
212 .clone()
213 .and_then(|provider| match provider {
214 AssistantProviderContentV1::ZedDotDev { default_model } => {
215 default_model.map(|model| LanguageModelSelection {
216 provider: "zed.dev".to_string(),
217 model: model.id().to_string(),
218 })
219 }
220 AssistantProviderContentV1::OpenAi { default_model, .. } => {
221 default_model.map(|model| LanguageModelSelection {
222 provider: "openai".to_string(),
223 model: model.id().to_string(),
224 })
225 }
226 AssistantProviderContentV1::Anthropic { default_model, .. } => {
227 default_model.map(|model| LanguageModelSelection {
228 provider: "anthropic".to_string(),
229 model: model.id().to_string(),
230 })
231 }
232 AssistantProviderContentV1::Ollama { default_model, .. } => {
233 default_model.map(|model| LanguageModelSelection {
234 provider: "ollama".to_string(),
235 model: model.id().to_string(),
236 })
237 }
238 }),
239 },
240 VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
241 },
242 AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
243 enabled: None,
244 button: settings.button,
245 dock: settings.dock,
246 default_width: settings.default_width,
247 default_height: settings.default_height,
248 default_model: Some(LanguageModelSelection {
249 provider: "openai".to_string(),
250 model: settings
251 .default_open_ai_model
252 .clone()
253 .unwrap_or_default()
254 .id()
255 .to_string(),
256 }),
257 },
258 }
259 }
260
261 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
262 match self {
263 AssistantSettingsContent::Versioned(settings) => match settings {
264 VersionedAssistantSettingsContent::V1(settings) => {
265 settings.dock = Some(dock);
266 }
267 VersionedAssistantSettingsContent::V2(settings) => {
268 settings.dock = Some(dock);
269 }
270 },
271 AssistantSettingsContent::Legacy(settings) => {
272 settings.dock = Some(dock);
273 }
274 }
275 }
276
277 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
278 let model = language_model.id().0.to_string();
279 let provider = language_model.provider_id().0.to_string();
280
281 match self {
282 AssistantSettingsContent::Versioned(settings) => match settings {
283 VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
284 "zed.dev" => {
285 log::warn!("attempted to set zed.dev model on outdated settings");
286 }
287 "anthropic" => {
288 let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
289 Some(AssistantProviderContentV1::Anthropic {
290 api_url,
291 low_speed_timeout_in_seconds,
292 ..
293 }) => (api_url.clone(), *low_speed_timeout_in_seconds),
294 _ => (None, None),
295 };
296 settings.provider = Some(AssistantProviderContentV1::Anthropic {
297 default_model: AnthropicModel::from_id(&model).ok(),
298 api_url,
299 low_speed_timeout_in_seconds,
300 });
301 }
302 "ollama" => {
303 let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
304 Some(AssistantProviderContentV1::Ollama {
305 api_url,
306 low_speed_timeout_in_seconds,
307 ..
308 }) => (api_url.clone(), *low_speed_timeout_in_seconds),
309 _ => (None, None),
310 };
311 settings.provider = Some(AssistantProviderContentV1::Ollama {
312 default_model: Some(ollama::Model::new(&model, None, None)),
313 api_url,
314 low_speed_timeout_in_seconds,
315 });
316 }
317 "openai" => {
318 let (api_url, low_speed_timeout_in_seconds, available_models) =
319 match &settings.provider {
320 Some(AssistantProviderContentV1::OpenAi {
321 api_url,
322 low_speed_timeout_in_seconds,
323 available_models,
324 ..
325 }) => (
326 api_url.clone(),
327 *low_speed_timeout_in_seconds,
328 available_models.clone(),
329 ),
330 _ => (None, None, None),
331 };
332 settings.provider = Some(AssistantProviderContentV1::OpenAi {
333 default_model: OpenAiModel::from_id(&model).ok(),
334 api_url,
335 low_speed_timeout_in_seconds,
336 available_models,
337 });
338 }
339 _ => {}
340 },
341 VersionedAssistantSettingsContent::V2(settings) => {
342 settings.default_model = Some(LanguageModelSelection { provider, model });
343 }
344 },
345 AssistantSettingsContent::Legacy(settings) => {
346 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
347 settings.default_open_ai_model = Some(model);
348 }
349 }
350 }
351 }
352}
353
/// Versioned assistant settings, discriminated by the `"version"` field.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    /// `"version": "1"` — provider configuration stored inline.
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    /// `"version": "2"` — references a model via provider/model names; the
    /// provider configuration itself lives in the language model settings.
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
362
363impl Default for VersionedAssistantSettingsContent {
364 fn default() -> Self {
365 Self::V2(AssistantSettingsContentV2 {
366 enabled: None,
367 button: None,
368 dock: None,
369 default_width: None,
370 default_height: None,
371 default_model: None,
372 })
373 }
374}
375
/// Assistant settings content, version 2.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new contexts.
    default_model: Option<LanguageModelSelection>,
}
401
/// A reference to a language model: a provider name plus a model identifier.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    /// The provider name, e.g. "openai" or "anthropic"; the JSON schema
    /// restricts it to the known providers (see `providers_schema`).
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    /// The provider-specific model identifier.
    pub model: String,
}
408
409fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
410 schemars::schema::SchemaObject {
411 enum_values: Some(vec![
412 "anthropic".into(),
413 "google".into(),
414 "ollama".into(),
415 "openai".into(),
416 "zed.dev".into(),
417 "copilot_chat".into(),
418 ]),
419 ..Default::default()
420 }
421 .into()
422}
423
424impl Default for LanguageModelSelection {
425 fn default() -> Self {
426 Self {
427 provider: "openai".to_string(),
428 model: "gpt-4".to_string(),
429 }
430 }
431}
432
/// Assistant settings content, version 1, which embeds the provider
/// configuration directly in the assistant settings.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
461
/// The original, unversioned assistant settings format, which only
/// supported OpenAI.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new contexts.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new contexts.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}
489
490impl Settings for AssistantSettings {
491 const KEY: Option<&'static str> = Some("assistant");
492
493 const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
494
495 type FileContent = AssistantSettingsContent;
496
497 fn load(
498 sources: SettingsSources<Self::FileContent>,
499 _: &mut gpui::AppContext,
500 ) -> anyhow::Result<Self> {
501 let mut settings = AssistantSettings::default();
502
503 for value in sources.defaults_and_customizations() {
504 if value.is_version_outdated() {
505 settings.using_outdated_settings_version = true;
506 }
507
508 let value = value.upgrade();
509 merge(&mut settings.enabled, value.enabled);
510 merge(&mut settings.button, value.button);
511 merge(&mut settings.dock, value.dock);
512 merge(
513 &mut settings.default_width,
514 value.default_width.map(Into::into),
515 );
516 merge(
517 &mut settings.default_height,
518 value.default_height.map(Into::into),
519 );
520 merge(
521 &mut settings.default_model,
522 value.default_model.map(Into::into),
523 );
524 // merge(&mut settings.infer_context, value.infer_context); TODO re-enable this once we ship context inference
525 }
526
527 Ok(settings)
528 }
529}
530
/// Overwrites `target` with `value` when one is provided; otherwise leaves
/// `target` untouched.
fn merge<T>(target: &mut T, value: Option<T>) {
    match value {
        Some(value) => *target = value,
        None => {}
    }
}
536
#[cfg(test)]
mod tests {
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    // Verifies that the default settings deserialize as the current (V2)
    // version, and that writing settings back produces a versioned document
    // that round-trips without being flagged as outdated.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // The built-in defaults are on the latest version and select the
        // zed.dev Claude model.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        // Overwrite the settings file with an explicit V2 document.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        // The serialized file must carry the preserved "version" key...
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        // ...and deserializing it again must not report an outdated version.
        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}