mod agent_profile;

use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;
use anthropic::Model as AnthropicModel;
use anyhow::{Result, bail};
use collections::IndexMap;
use deepseek::Model as DeepseekModel;
use gpui::{App, Pixels, SharedString};
use language_model::{CloudModel, LanguageModel};
use lmstudio::Model as LmStudioModel;
use ollama::Model as OllamaModel;
use schemars::{JsonSchema, schema::Schema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

pub use crate::agent_profile::*;

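/// Registers [`AssistantSettings`] with the global settings store.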
pub fn init(cx: &mut App) {
    AssistantSettings::register(cx);
}

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    #[default]
    Right,
    Bottom,
}

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    #[default]
    PrimaryScreen,
    AllScreens,
    Never,
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
#[schemars(deny_unknown_fields)]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}

#[derive(Default, Clone, Debug)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub inline_assistant_model: Option<LanguageModelSelection>,
    pub commit_message_model: Option<LanguageModelSelection>,
    pub thread_summary_model: Option<LanguageModelSelection>,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub default_profile: AgentProfileId,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    pub stream_edits: bool,
    pub single_file_review: bool,
    pub model_parameters: Vec<LanguageModelParameters>,
    pub preferred_completion_mode: CompletionMode,
}

impl AssistantSettings {
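    /// Returns the temperature override from the last `model_parameters` entry
    /// that matches the given model, if any.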
    pub fn temperature_for_model(model: &Arc<dyn LanguageModel>, cx: &App) -> Option<f32> {
        let settings = Self::get_global(cx);
        settings
            .model_parameters
            .iter()
            .rfind(|setting| setting.matches(model))
            .and_then(|m| m.temperature)
    }

    pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
        self.inline_assistant_model = Some(LanguageModelSelection {
            provider: provider.into(),
            model,
        });
    }

    pub fn set_commit_message_model(&mut self, provider: String, model: String) {
        self.commit_message_model = Some(LanguageModelSelection {
            provider: provider.into(),
            model,
        });
    }

    pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
        self.thread_summary_model = Some(LanguageModelSelection {
            provider: provider.into(),
            model,
        });
    }
}

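/// A per-model override for request parameters such as temperature.
///
/// Entries are matched against a model's provider and id; fields left as `None`
/// match anything, and when several entries match, the last one wins (see
/// [`AssistantSettings::temperature_for_model`]).
///
/// A minimal sketch of the last-match-wins behavior (hypothetical values):
///
/// ```ignore
/// let params = vec![
///     // Applies to every model.
///     LanguageModelParameters { provider: None, model: None, temperature: Some(1.0) },
///     // Overrides the entry above for Anthropic models only.
///     LanguageModelParameters { provider: Some("anthropic".into()), model: None, temperature: Some(0.5) },
/// ];
/// ```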
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelParameters {
    pub provider: Option<LanguageModelProviderSetting>,
    pub model: Option<SharedString>,
    pub temperature: Option<f32>,
}

impl LanguageModelParameters {
    pub fn matches(&self, model: &Arc<dyn LanguageModel>) -> bool {
        if let Some(provider) = &self.provider {
            if provider.0 != model.provider_id().0 {
                return false;
            }
        }
        if let Some(setting_model) = &self.model {
            if *setting_model != model.id().0 {
                return false;
            }
        }
        true
    }
}

/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}

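/// The raw settings content, either in a versioned form (`"version": "1"` or
/// `"version": "2"`) or in the legacy, unversioned OpenAI-only form.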
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    Versioned(Box<VersionedAssistantSettingsContent>),
    Legacy(LegacyAssistantSettingsContent),
}

impl AssistantSettingsContentInner {
    fn for_v2(content: AssistantSettingsContentV2) -> Self {
        AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
            content,
        )))
    }
}

impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}

impl AssistantSettingsContent {
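    /// Whether the content is in the legacy or V1 format and should be migrated to V2.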
    pub fn is_version_outdated(&self) -> bool {
        match &self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(_) => true,
                VersionedAssistantSettingsContent::V2(_) => false,
            },
            Some(AssistantSettingsContentInner::Legacy(_)) => true,
            None => false,
        }
    }

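    /// Converts the content, whatever its original version, into the current V2 shape.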
    fn upgrade(&self) -> AssistantSettingsContentV2 {
        match &self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
                    enabled: settings.enabled,
                    button: settings.button,
                    dock: settings.dock,
                    default_width: settings.default_width,
                    default_height: settings.default_height,
                    default_model: settings
                        .provider
                        .clone()
                        .and_then(|provider| match provider {
                            AssistantProviderContentV1::ZedDotDev { default_model } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "zed.dev".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::OpenAi { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "openai".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Anthropic { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "anthropic".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::Ollama { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "ollama".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::LmStudio { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "lmstudio".into(),
                                    model: model.id().to_string(),
                                })
                            }
                            AssistantProviderContentV1::DeepSeek { default_model, .. } => {
                                default_model.map(|model| LanguageModelSelection {
                                    provider: "deepseek".into(),
                                    model: model.id().to_string(),
                                })
                            }
                        }),
                    inline_assistant_model: None,
                    commit_message_model: None,
                    thread_summary_model: None,
                    inline_alternatives: None,
                    default_profile: None,
                    profiles: None,
                    always_allow_tool_actions: None,
                    notify_when_agent_waiting: None,
                    stream_edits: None,
                    single_file_review: None,
                    model_parameters: Vec::new(),
                    preferred_completion_mode: None,
                },
                VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
                enabled: None,
                button: settings.button,
                dock: settings.dock,
                default_width: settings.default_width,
                default_height: settings.default_height,
                default_model: Some(LanguageModelSelection {
                    provider: "openai".into(),
                    model: settings
                        .default_open_ai_model
                        .clone()
                        .unwrap_or_default()
                        .id()
                        .to_string(),
                }),
                inline_assistant_model: None,
                commit_message_model: None,
                thread_summary_model: None,
                inline_alternatives: None,
                default_profile: None,
                profiles: None,
                always_allow_tool_actions: None,
                notify_when_agent_waiting: None,
                stream_edits: None,
                single_file_review: None,
                model_parameters: Vec::new(),
                preferred_completion_mode: None,
            },
            None => AssistantSettingsContentV2::default(),
        }
    }

    pub fn set_dock(&mut self, dock: AssistantDockPosition) {
        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    settings.dock = Some(dock);
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.dock = Some(dock);
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                settings.dock = Some(dock);
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        dock: Some(dock),
                        ..Default::default()
                    },
                ))
            }
        }
    }

    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    match provider.as_ref() {
                        "zed.dev" => {
                            log::warn!("attempted to set zed.dev model on outdated settings");
                        }
                        "anthropic" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Anthropic {
                                default_model: AnthropicModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        "ollama" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Ollama {
                                default_model: Some(ollama::Model::new(
                                    &model,
                                    None,
                                    None,
                                    Some(language_model.supports_tools()),
                                )),
                                api_url,
                            });
                        }
                        "lmstudio" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::LmStudio {
                                default_model: Some(lmstudio::Model::new(&model, None, None)),
                                api_url,
                            });
                        }
                        "openai" => {
                            let (api_url, available_models) = match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    available_models,
                                    ..
                                }) => (api_url.clone(), available_models.clone()),
                                _ => (None, None),
                            };
                            settings.provider = Some(AssistantProviderContentV1::OpenAi {
                                default_model: OpenAiModel::from_id(&model).ok(),
                                api_url,
                                available_models,
                            });
                        }
                        "deepseek" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                                default_model: DeepseekModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        _ => {}
                    }
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.default_model = Some(LanguageModelSelection {
                        provider: provider.into(),
                        model,
                    });
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        default_model: Some(LanguageModelSelection {
                            provider: provider.into(),
                            model,
                        }),
                        ..Default::default()
                    },
                ));
            }
        }
    }

    pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.inline_assistant_model = Some(LanguageModelSelection {
                provider: provider.into(),
                model,
            });
            Ok(())
        })
        .ok();
    }

    pub fn set_commit_message_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.commit_message_model = Some(LanguageModelSelection {
                provider: provider.into(),
                model,
            });
            Ok(())
        })
        .ok();
    }

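    /// Applies `f` to the V2 settings content, creating default V2 content if
    /// none exists yet. Does nothing (and returns `Ok`) when the content is in
    /// the V1 or legacy format.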
    pub fn v2_setting(
        &mut self,
        f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
    ) -> anyhow::Result<()> {
        match self.inner.get_or_insert_with(|| {
            AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2::default())
        }) {
            AssistantSettingsContentInner::Versioned(boxed) => {
                if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
                    f(settings)
                } else {
                    Ok(())
                }
            }
            _ => Ok(()),
        }
    }

    pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
        self.v2_setting(|setting| {
            setting.thread_summary_model = Some(LanguageModelSelection {
                provider: provider.into(),
                model,
            });
            Ok(())
        })
        .ok();
    }

    pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
        self.v2_setting(|setting| {
            setting.always_allow_tool_actions = Some(allow);
            Ok(())
        })
        .ok();
    }

    pub fn set_single_file_review(&mut self, allow: bool) {
        self.v2_setting(|setting| {
            setting.single_file_review = Some(allow);
            Ok(())
        })
        .ok();
    }

    pub fn set_profile(&mut self, profile_id: AgentProfileId) {
        self.v2_setting(|setting| {
            setting.default_profile = Some(profile_id);
            Ok(())
        })
        .ok();
    }

    pub fn create_profile(
        &mut self,
        profile_id: AgentProfileId,
        profile: AgentProfile,
    ) -> Result<()> {
        self.v2_setting(|settings| {
            let profiles = settings.profiles.get_or_insert_default();
            if profiles.contains_key(&profile_id) {
                bail!("profile with ID '{profile_id}' already exists");
            }

            profiles.insert(
                profile_id,
                AgentProfileContent {
                    name: profile.name.into(),
                    tools: profile.tools,
                    enable_all_context_servers: Some(profile.enable_all_context_servers),
                    context_servers: profile
                        .context_servers
                        .into_iter()
                        .map(|(server_id, preset)| {
                            (
                                server_id,
                                ContextServerPresetContent {
                                    tools: preset.tools,
                                },
                            )
                        })
                        .collect(),
                },
            );

            Ok(())
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
#[schemars(deny_unknown_fields)]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}

impl Default for VersionedAssistantSettingsContent {
    fn default() -> Self {
        Self::V2(AssistantSettingsContentV2 {
            enabled: None,
            button: None,
            dock: None,
            default_width: None,
            default_height: None,
            default_model: None,
            inline_assistant_model: None,
            commit_message_model: None,
            thread_summary_model: None,
            inline_alternatives: None,
            default_profile: None,
            profiles: None,
            always_allow_tool_actions: None,
            notify_when_agent_waiting: None,
            stream_edits: None,
            single_file_review: None,
            model_parameters: Vec::new(),
            preferred_completion_mode: None,
        })
    }
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whether to automatically allow tool actions that would otherwise
    /// wait for your confirmation.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
    /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
    ///
    /// Default: true
    single_file_review: Option<bool>,
    /// Additional parameters for language model requests. When making a request
    /// to a model, parameters will be taken from the last entry in this list
    /// that matches the model's provider and name. In each entry, both provider
    /// and model are optional, so that you can specify parameters for either
    /// one.
    ///
    /// Default: []
    #[serde(default)]
    model_parameters: Vec<LanguageModelParameters>,

    /// What completion mode to enable for new threads.
    ///
    /// Default: normal
    preferred_completion_mode: Option<CompletionMode>,
}

#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
    #[default]
    Normal,
    Max,
}

impl From<CompletionMode> for zed_llm_client::CompletionMode {
    fn from(value: CompletionMode) -> Self {
        match value {
            CompletionMode::Normal => zed_llm_client::CompletionMode::Normal,
            CompletionMode::Max => zed_llm_client::CompletionMode::Max,
        }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    pub provider: LanguageModelProviderSetting,
    pub model: String,
}

#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelProviderSetting(pub String);

impl JsonSchema for LanguageModelProviderSetting {
    fn schema_name() -> String {
        "LanguageModelProviderSetting".into()
    }

    fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        schemars::schema::SchemaObject {
            enum_values: Some(vec![
                "anthropic".into(),
                "bedrock".into(),
                "google".into(),
                "lmstudio".into(),
                "ollama".into(),
                "openai".into(),
                "zed.dev".into(),
                "copilot_chat".into(),
                "deepseek".into(),
            ]),
            ..Default::default()
        }
        .into()
    }
}

impl From<String> for LanguageModelProviderSetting {
    fn from(provider: String) -> Self {
        Self(provider)
    }
}

impl From<&str> for LanguageModelProviderSetting {
    fn from(provider: &str) -> Self {
        Self(provider.to_string())
    }
}

impl Default for LanguageModelSelection {
    fn default() -> Self {
        Self {
            provider: LanguageModelProviderSetting("openai".to_string()),
            model: "gpt-4".to_string(),
        }
    }
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}

#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", or "zed.dev",
    /// each with its respective default model and configuration.
    provider: Option<AssistantProviderContentV1>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}

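// Settings are read from the `agent` key, falling back to the legacy `assistant`
// key for configurations that have not been migrated.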
impl Settings for AssistantSettings {
    const KEY: Option<&'static str> = Some("agent");

    const FALLBACK_KEY: Option<&'static str> = Some("assistant");

    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.single_file_review, value.single_file_review);
            merge(&mut settings.default_profile, value.default_profile);
            merge(
                &mut settings.preferred_completion_mode,
                value.preferred_completion_mode,
            );

            settings
                .model_parameters
                .extend_from_slice(&value.model_parameters);

            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}

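/// Overwrites `target` when the layered settings provide a value.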
fn merge<T>(target: &mut T, value: Option<T>) {
    if let Some(value) = value {
        *target = value;
    }
}

#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};
    use settings::SettingsStore;

    use super::*;

    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                default_profile: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                                single_file_review: None,
                                model_parameters: Vec::new(),
                                preferred_completion_mode: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            agent: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.agent.is_version_outdated());
    }

    #[gpui::test]
    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let mut test_settings = settings::SettingsStore::test(cx);
            let user_settings_content = r#"{
                "assistant": {
                    "enabled": true,
                    "version": "2",
                    "default_model": {
                        "provider": "zed.dev",
                        "model": "gpt-99"
                    },
                }}"#;
            test_settings
                .set_user_settings(user_settings_content, cx)
                .unwrap();
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.run_until_parked();

        let assistant_settings = cx.update(|cx| AssistantSettings::get_global(cx).clone());
        assert!(assistant_settings.enabled);
        assert!(!assistant_settings.using_outdated_settings_version);
        assert_eq!(assistant_settings.default_model.model, "gpt-99");

        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
            settings_store.update_user_settings::<AssistantSettings>(cx, |settings| {
                *settings = AssistantSettingsContent {
                    inner: Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(false),
                            default_model: Some(LanguageModelSelection {
                                provider: "xai".to_owned().into(),
                                model: "grok".to_owned(),
                            }),
                            ..Default::default()
                        },
                    )),
                };
            });
        });

        cx.run_until_parked();

        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
            agent: Option<serde_json_lenient::Value>,
        }

        let assistant_settings: AssistantSettingsTest = serde_json::from_value(settings).unwrap();
        assert!(assistant_settings.agent.is_none());
    }
}