1use std::sync::Arc;
2
3use ::open_ai::Model as OpenAiModel;
4use anthropic::Model as AnthropicModel;
5use fs::Fs;
6use gpui::{AppContext, Pixels};
7use language_model::provider::open_ai;
8use language_model::settings::{
9 AnthropicSettingsContent, AnthropicSettingsContentV1, OllamaSettingsContent,
10 OpenAiSettingsContent, OpenAiSettingsContentV1, VersionedAnthropicSettingsContent,
11 VersionedOpenAiSettingsContent,
12};
13use language_model::{settings::AllLanguageModelSettings, CloudModel, LanguageModel};
14use ollama::Model as OllamaModel;
15use schemars::{schema::Schema, JsonSchema};
16use serde::{Deserialize, Serialize};
17use settings::{update_settings_file, Settings, SettingsSources};
18
/// Where the assistant panel is docked in the workspace.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    /// Docked to the left edge of the window.
    Left,
    /// Docked to the right edge of the window (the default).
    #[default]
    Right,
    /// Docked along the bottom of the window.
    Bottom,
}
27
/// Inline provider configuration as stored by the V1 settings format.
///
/// Serialized internally tagged on `"name"`, so the JSON records which
/// provider is active alongside that provider's connection options.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
pub enum AssistantProviderContentV1 {
    /// Zed's hosted service; only the default model is configurable.
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    /// OpenAI (or OpenAI-compatible) endpoint, optionally with a
    /// user-supplied list of additional models.
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    /// Anthropic API configuration.
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
    /// Ollama server configuration.
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
        low_speed_timeout_in_seconds: Option<u64>,
    },
}
53
/// The resolved assistant settings after all settings sources (defaults plus
/// user customizations) have been upgraded and merged.
#[derive(Debug, Default)]
pub struct AssistantSettings {
    /// Whether the assistant feature is enabled at all.
    pub enabled: bool,
    /// Whether to show the assistant panel button in the status bar.
    pub button: bool,
    /// Which edge of the workspace the panel is docked to.
    pub dock: AssistantDockPosition,
    /// Panel width when docked left or right.
    pub default_width: Pixels,
    /// Panel height when docked at the bottom.
    pub default_height: Pixels,
    /// The model used when creating new contexts.
    pub default_model: LanguageModelSelection,
    /// Extra models used to generate alternatives for inline assists.
    pub inline_alternatives: Vec<LanguageModelSelection>,
    /// True when any settings source was written in a pre-V2 format,
    /// so the UI can prompt the user to migrate.
    pub using_outdated_settings_version: bool,
}
65
/// Assistant panel settings as they appear in the settings file.
///
/// Deserialized untagged: serde tries `Versioned` first (a value carrying a
/// `"version"` field), and otherwise falls back to the legacy flat layout.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContent {
    Versioned(VersionedAssistantSettingsContent),
    Legacy(LegacyAssistantSettingsContent),
}
73
74impl JsonSchema for AssistantSettingsContent {
75 fn schema_name() -> String {
76 VersionedAssistantSettingsContent::schema_name()
77 }
78
79 fn json_schema(gen: &mut schemars::gen::SchemaGenerator) -> Schema {
80 VersionedAssistantSettingsContent::json_schema(gen)
81 }
82
83 fn is_referenceable() -> bool {
84 VersionedAssistantSettingsContent::is_referenceable()
85 }
86}
87
88impl Default for AssistantSettingsContent {
89 fn default() -> Self {
90 Self::Versioned(VersionedAssistantSettingsContent::default())
91 }
92}
93
94impl AssistantSettingsContent {
95 pub fn is_version_outdated(&self) -> bool {
96 match self {
97 AssistantSettingsContent::Versioned(settings) => match settings {
98 VersionedAssistantSettingsContent::V1(_) => true,
99 VersionedAssistantSettingsContent::V2(_) => false,
100 },
101 AssistantSettingsContent::Legacy(_) => true,
102 }
103 }
104
105 pub fn update_file(&mut self, fs: Arc<dyn Fs>, cx: &AppContext) {
106 if let AssistantSettingsContent::Versioned(settings) = self {
107 if let VersionedAssistantSettingsContent::V1(settings) = settings {
108 if let Some(provider) = settings.provider.clone() {
109 match provider {
110 AssistantProviderContentV1::Anthropic {
111 api_url,
112 low_speed_timeout_in_seconds,
113 ..
114 } => update_settings_file::<AllLanguageModelSettings>(
115 fs,
116 cx,
117 move |content, _| {
118 if content.anthropic.is_none() {
119 content.anthropic = Some(AnthropicSettingsContent::Versioned(
120 VersionedAnthropicSettingsContent::V1(
121 AnthropicSettingsContentV1 {
122 api_url,
123 low_speed_timeout_in_seconds,
124 available_models: None,
125 },
126 ),
127 ));
128 }
129 },
130 ),
131 AssistantProviderContentV1::Ollama {
132 api_url,
133 low_speed_timeout_in_seconds,
134 ..
135 } => update_settings_file::<AllLanguageModelSettings>(
136 fs,
137 cx,
138 move |content, _| {
139 if content.ollama.is_none() {
140 content.ollama = Some(OllamaSettingsContent {
141 api_url,
142 low_speed_timeout_in_seconds,
143 available_models: None,
144 });
145 }
146 },
147 ),
148 AssistantProviderContentV1::OpenAi {
149 api_url,
150 low_speed_timeout_in_seconds,
151 available_models,
152 ..
153 } => update_settings_file::<AllLanguageModelSettings>(
154 fs,
155 cx,
156 move |content, _| {
157 if content.openai.is_none() {
158 let available_models = available_models.map(|models| {
159 models
160 .into_iter()
161 .filter_map(|model| match model {
162 OpenAiModel::Custom {
163 name,
164 display_name,
165 max_tokens,
166 max_output_tokens,
167 max_completion_tokens: None,
168 } => Some(open_ai::AvailableModel {
169 name,
170 display_name,
171 max_tokens,
172 max_output_tokens,
173 max_completion_tokens: None,
174 }),
175 _ => None,
176 })
177 .collect::<Vec<_>>()
178 });
179 content.openai = Some(OpenAiSettingsContent::Versioned(
180 VersionedOpenAiSettingsContent::V1(
181 OpenAiSettingsContentV1 {
182 api_url,
183 low_speed_timeout_in_seconds,
184 available_models,
185 },
186 ),
187 ));
188 }
189 },
190 ),
191 _ => {}
192 }
193 }
194 }
195 }
196
197 *self = AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
198 self.upgrade(),
199 ));
200 }
201
202 fn upgrade(&self) -> AssistantSettingsContentV2 {
203 match self {
204 AssistantSettingsContent::Versioned(settings) => match settings {
205 VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
206 enabled: settings.enabled,
207 button: settings.button,
208 dock: settings.dock,
209 default_width: settings.default_width,
210 default_height: settings.default_width,
211 default_model: settings
212 .provider
213 .clone()
214 .and_then(|provider| match provider {
215 AssistantProviderContentV1::ZedDotDev { default_model } => {
216 default_model.map(|model| LanguageModelSelection {
217 provider: "zed.dev".to_string(),
218 model: model.id().to_string(),
219 })
220 }
221 AssistantProviderContentV1::OpenAi { default_model, .. } => {
222 default_model.map(|model| LanguageModelSelection {
223 provider: "openai".to_string(),
224 model: model.id().to_string(),
225 })
226 }
227 AssistantProviderContentV1::Anthropic { default_model, .. } => {
228 default_model.map(|model| LanguageModelSelection {
229 provider: "anthropic".to_string(),
230 model: model.id().to_string(),
231 })
232 }
233 AssistantProviderContentV1::Ollama { default_model, .. } => {
234 default_model.map(|model| LanguageModelSelection {
235 provider: "ollama".to_string(),
236 model: model.id().to_string(),
237 })
238 }
239 }),
240 inline_alternatives: None,
241 },
242 VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
243 },
244 AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
245 enabled: None,
246 button: settings.button,
247 dock: settings.dock,
248 default_width: settings.default_width,
249 default_height: settings.default_height,
250 default_model: Some(LanguageModelSelection {
251 provider: "openai".to_string(),
252 model: settings
253 .default_open_ai_model
254 .clone()
255 .unwrap_or_default()
256 .id()
257 .to_string(),
258 }),
259 inline_alternatives: None,
260 },
261 }
262 }
263
264 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
265 match self {
266 AssistantSettingsContent::Versioned(settings) => match settings {
267 VersionedAssistantSettingsContent::V1(settings) => {
268 settings.dock = Some(dock);
269 }
270 VersionedAssistantSettingsContent::V2(settings) => {
271 settings.dock = Some(dock);
272 }
273 },
274 AssistantSettingsContent::Legacy(settings) => {
275 settings.dock = Some(dock);
276 }
277 }
278 }
279
280 pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
281 let model = language_model.id().0.to_string();
282 let provider = language_model.provider_id().0.to_string();
283
284 match self {
285 AssistantSettingsContent::Versioned(settings) => match settings {
286 VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
287 "zed.dev" => {
288 log::warn!("attempted to set zed.dev model on outdated settings");
289 }
290 "anthropic" => {
291 let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
292 Some(AssistantProviderContentV1::Anthropic {
293 api_url,
294 low_speed_timeout_in_seconds,
295 ..
296 }) => (api_url.clone(), *low_speed_timeout_in_seconds),
297 _ => (None, None),
298 };
299 settings.provider = Some(AssistantProviderContentV1::Anthropic {
300 default_model: AnthropicModel::from_id(&model).ok(),
301 api_url,
302 low_speed_timeout_in_seconds,
303 });
304 }
305 "ollama" => {
306 let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
307 Some(AssistantProviderContentV1::Ollama {
308 api_url,
309 low_speed_timeout_in_seconds,
310 ..
311 }) => (api_url.clone(), *low_speed_timeout_in_seconds),
312 _ => (None, None),
313 };
314 settings.provider = Some(AssistantProviderContentV1::Ollama {
315 default_model: Some(ollama::Model::new(&model, None, None)),
316 api_url,
317 low_speed_timeout_in_seconds,
318 });
319 }
320 "openai" => {
321 let (api_url, low_speed_timeout_in_seconds, available_models) =
322 match &settings.provider {
323 Some(AssistantProviderContentV1::OpenAi {
324 api_url,
325 low_speed_timeout_in_seconds,
326 available_models,
327 ..
328 }) => (
329 api_url.clone(),
330 *low_speed_timeout_in_seconds,
331 available_models.clone(),
332 ),
333 _ => (None, None, None),
334 };
335 settings.provider = Some(AssistantProviderContentV1::OpenAi {
336 default_model: OpenAiModel::from_id(&model).ok(),
337 api_url,
338 low_speed_timeout_in_seconds,
339 available_models,
340 });
341 }
342 _ => {}
343 },
344 VersionedAssistantSettingsContent::V2(settings) => {
345 settings.default_model = Some(LanguageModelSelection { provider, model });
346 }
347 },
348 AssistantSettingsContent::Legacy(settings) => {
349 if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
350 settings.default_open_ai_model = Some(model);
351 }
352 }
353 }
354 }
355}
356
/// Versioned wrapper around the assistant settings schema.
///
/// The `"version"` string in the settings JSON selects which layout applies.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
365
366impl Default for VersionedAssistantSettingsContent {
367 fn default() -> Self {
368 Self::V2(AssistantSettingsContentV2 {
369 enabled: None,
370 button: None,
371 dock: None,
372 default_width: None,
373 default_height: None,
374 default_model: None,
375 inline_alternatives: None,
376 })
377 }
378}
379
/// Assistant settings, version 2. Provider connection details now live in
/// the language-model settings; only a (provider, model) selection remains.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new contexts.
    default_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
}
407
/// A (provider, model) pair identifying a language model.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    /// Provider id, e.g. "openai" or "zed.dev". The JSON schema restricts
    /// this to the set of known providers via `providers_schema`.
    #[schemars(schema_with = "providers_schema")]
    pub provider: String,
    /// Provider-specific model id.
    pub model: String,
}
414
415fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {
416 schemars::schema::SchemaObject {
417 enum_values: Some(vec![
418 "anthropic".into(),
419 "google".into(),
420 "ollama".into(),
421 "openai".into(),
422 "zed.dev".into(),
423 "copilot_chat".into(),
424 ]),
425 ..Default::default()
426 }
427 .into()
428}
429
430impl Default for LanguageModelSelection {
431 fn default() -> Self {
432 Self {
433 provider: "openai".to_string(),
434 model: "gpt-4".to_string(),
435 }
436 }
437}
438
/// Assistant settings, version 1. Provider connection details were stored
/// inline here; `update_file` migrates them to the language-model settings.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
467
/// Pre-versioning settings layout (no `"version"` field), which only
/// supported OpenAI. Kept so old settings files still deserialize.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new contexts.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new contexts.
    ///
    /// Default: https://api.openai.com/v1
    pub openai_api_url: Option<String>,
}
495
impl Settings for AssistantSettings {
    /// JSON key under which these settings live in the settings file.
    const KEY: Option<&'static str> = Some("assistant");

    /// Keys preserved verbatim when the settings file is rewritten.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Builds the effective settings by upgrading every source (defaults plus
    /// user customizations) to V2 and layering them in order; later sources
    /// override earlier ones field by field via `merge`.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::AppContext,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Remember if any source used an outdated format so the UI can
            // prompt the user to migrate.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            // merge(&mut settings.infer_context, value.infer_context); TODO re-enable this once we ship context inference
        }

        Ok(settings)
    }
}
534
/// Overwrites `target` with `value` when a value is present; otherwise
/// leaves `target` untouched.
fn merge<T>(target: &mut T, value: Option<T>) {
    match value {
        Some(value) => *target = value,
        None => {}
    }
}
540
#[cfg(test)]
mod tests {
    use gpui::{ReadGlobal, TestAppContext};

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        // Register the assistant settings against a fresh in-memory store.
        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // The built-in defaults should already be on the latest version.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-5-sonnet".into(),
                }
            );
        });

        // Write a V2 settings value through the settings store.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent::Versioned(
                        VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
                            default_model: Some(LanguageModelSelection {
                                provider: "test-provider".into(),
                                model: "gpt-99".into(),
                            }),
                            inline_alternatives: None,
                            enabled: None,
                            button: None,
                            dock: None,
                            default_width: None,
                            default_height: None,
                        }),
                    )
                },
            );
        });

        cx.run_until_parked();

        // The serialized file must carry the version tag...
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
        }

        // ...and round-trip back through deserialization as a current version.
        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.assistant.is_version_outdated());
    }
}