1mod agent_profile;
2
3use std::sync::Arc;
4
5use ::open_ai::Model as OpenAiModel;
6use anthropic::Model as AnthropicModel;
7use anyhow::{Result, bail};
8use collections::IndexMap;
9use deepseek::Model as DeepseekModel;
10use gpui::{App, Pixels, SharedString};
11use language_model::{CloudModel, LanguageModel};
12use lmstudio::Model as LmStudioModel;
13use ollama::Model as OllamaModel;
14use schemars::{JsonSchema, schema::Schema};
15use serde::{Deserialize, Serialize};
16use settings::{Settings, SettingsSources};
17
18pub use crate::agent_profile::*;
19
/// Registers `AssistantSettings` with the global settings store so it is
/// loaded, merged, and kept up to date with the user's settings file.
pub fn init(cx: &mut App) {
    AssistantSettings::register(cx);
}
23
/// Where the assistant panel is docked in the workspace.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    /// Docked to the right edge (the default).
    #[default]
    Right,
    Bottom,
}
32
/// Which screens receive a popup notification when the agent is blocked on
/// user input.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    /// Notify only on the primary screen (the default).
    #[default]
    PrimaryScreen,
    AllScreens,
    Never,
}
41
/// Provider configuration as stored by version-1 settings: exactly one
/// provider, discriminated by its `name` field, each variant carrying that
/// provider's own model type and optional API URL.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
#[schemars(deny_unknown_fields)]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<CloudModel> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        // OpenAI is the only V1 provider that supports a user-supplied model list.
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
}
75
/// Fully-resolved assistant settings, produced by merging all settings
/// sources (defaults plus user customizations) in `Settings::load`.
#[derive(Default, Clone, Debug)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    // The three per-feature models below fall back to `default_model` when unset.
    pub inline_assistant_model: Option<LanguageModelSelection>,
    pub commit_message_model: Option<LanguageModelSelection>,
    pub thread_summary_model: Option<LanguageModelSelection>,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    // True when any settings source was written in a pre-V2 format.
    pub using_outdated_settings_version: bool,
    pub default_profile: AgentProfileId,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    pub stream_edits: bool,
    pub single_file_review: bool,
    // Accumulated across all sources; later entries take precedence.
    pub model_parameters: Vec<LanguageModelParameters>,
    pub preferred_completion_mode: CompletionMode,
    pub enable_feedback: bool,
}
99
100impl AssistantSettings {
101 pub fn temperature_for_model(model: &Arc<dyn LanguageModel>, cx: &App) -> Option<f32> {
102 let settings = Self::get_global(cx);
103 settings
104 .model_parameters
105 .iter()
106 .rfind(|setting| setting.matches(model))
107 .and_then(|m| m.temperature)
108 }
109
110 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
111 self.inline_assistant_model = Some(LanguageModelSelection {
112 provider: provider.into(),
113 model,
114 });
115 }
116
117 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
118 self.commit_message_model = Some(LanguageModelSelection {
119 provider: provider.into(),
120 model,
121 });
122 }
123
124 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
125 self.thread_summary_model = Some(LanguageModelSelection {
126 provider: provider.into(),
127 model,
128 });
129 }
130}
131
/// A request-parameter override scoped to a provider and/or model.
///
/// Both `provider` and `model` are optional; an unset field matches any
/// provider or model respectively.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelParameters {
    pub provider: Option<LanguageModelProviderSetting>,
    pub model: Option<SharedString>,
    pub temperature: Option<f32>,
}
138
139impl LanguageModelParameters {
140 pub fn matches(&self, model: &Arc<dyn LanguageModel>) -> bool {
141 if let Some(provider) = &self.provider {
142 if provider.0 != model.provider_id().0 {
143 return false;
144 }
145 }
146 if let Some(setting_model) = &self.model {
147 if *setting_model != model.id().0 {
148 return false;
149 }
150 }
151 true
152 }
153}
154
/// Assistant panel settings
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    // Flattened so the on-disk JSON has no extra nesting level; `None` means
    // the user has no assistant settings at all.
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}
161
/// The two shapes assistant settings can take on disk: a payload carrying an
/// explicit `"version"` tag, or the original unversioned (legacy) format.
/// `untagged` makes serde try each shape in order during deserialization.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    Versioned(Box<VersionedAssistantSettingsContent>),
    Legacy(LegacyAssistantSettingsContent),
}
168
169impl AssistantSettingsContentInner {
170 fn for_v2(content: AssistantSettingsContentV2) -> Self {
171 AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
172 content,
173 )))
174 }
175}
176
// Because `inner` is `#[serde(flatten)]`ed, the wrapper has no schema of its
// own: delegate everything to the versioned content's schema.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
190
191impl AssistantSettingsContent {
192 pub fn is_version_outdated(&self) -> bool {
193 match &self.inner {
194 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
195 VersionedAssistantSettingsContent::V1(_) => true,
196 VersionedAssistantSettingsContent::V2(_) => false,
197 },
198 Some(AssistantSettingsContentInner::Legacy(_)) => true,
199 None => false,
200 }
201 }
202
203 fn upgrade(&self) -> AssistantSettingsContentV2 {
204 match &self.inner {
205 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
206 VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
207 enabled: settings.enabled,
208 button: settings.button,
209 dock: settings.dock,
210 default_width: settings.default_width,
211 default_height: settings.default_width,
212 default_model: settings
213 .provider
214 .clone()
215 .and_then(|provider| match provider {
216 AssistantProviderContentV1::ZedDotDev { default_model } => {
217 default_model.map(|model| LanguageModelSelection {
218 provider: "zed.dev".into(),
219 model: model.id().to_string(),
220 })
221 }
222 AssistantProviderContentV1::OpenAi { default_model, .. } => {
223 default_model.map(|model| LanguageModelSelection {
224 provider: "openai".into(),
225 model: model.id().to_string(),
226 })
227 }
228 AssistantProviderContentV1::Anthropic { default_model, .. } => {
229 default_model.map(|model| LanguageModelSelection {
230 provider: "anthropic".into(),
231 model: model.id().to_string(),
232 })
233 }
234 AssistantProviderContentV1::Ollama { default_model, .. } => {
235 default_model.map(|model| LanguageModelSelection {
236 provider: "ollama".into(),
237 model: model.id().to_string(),
238 })
239 }
240 AssistantProviderContentV1::LmStudio { default_model, .. } => {
241 default_model.map(|model| LanguageModelSelection {
242 provider: "lmstudio".into(),
243 model: model.id().to_string(),
244 })
245 }
246 AssistantProviderContentV1::DeepSeek { default_model, .. } => {
247 default_model.map(|model| LanguageModelSelection {
248 provider: "deepseek".into(),
249 model: model.id().to_string(),
250 })
251 }
252 }),
253 inline_assistant_model: None,
254 commit_message_model: None,
255 thread_summary_model: None,
256 inline_alternatives: None,
257 default_profile: None,
258 profiles: None,
259 always_allow_tool_actions: None,
260 notify_when_agent_waiting: None,
261 stream_edits: None,
262 single_file_review: None,
263 model_parameters: Vec::new(),
264 preferred_completion_mode: None,
265 enable_feedback: None,
266 },
267 VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
268 },
269 Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
270 enabled: None,
271 button: settings.button,
272 dock: settings.dock,
273 default_width: settings.default_width,
274 default_height: settings.default_height,
275 default_model: Some(LanguageModelSelection {
276 provider: "openai".into(),
277 model: settings
278 .default_open_ai_model
279 .clone()
280 .unwrap_or_default()
281 .id()
282 .to_string(),
283 }),
284 inline_assistant_model: None,
285 commit_message_model: None,
286 thread_summary_model: None,
287 inline_alternatives: None,
288 default_profile: None,
289 profiles: None,
290 always_allow_tool_actions: None,
291 notify_when_agent_waiting: None,
292 stream_edits: None,
293 single_file_review: None,
294 model_parameters: Vec::new(),
295 preferred_completion_mode: None,
296 enable_feedback: None,
297 },
298 None => AssistantSettingsContentV2::default(),
299 }
300 }
301
302 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
303 match &mut self.inner {
304 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
305 VersionedAssistantSettingsContent::V1(ref mut settings) => {
306 settings.dock = Some(dock);
307 }
308 VersionedAssistantSettingsContent::V2(ref mut settings) => {
309 settings.dock = Some(dock);
310 }
311 },
312 Some(AssistantSettingsContentInner::Legacy(settings)) => {
313 settings.dock = Some(dock);
314 }
315 None => {
316 self.inner = Some(AssistantSettingsContentInner::for_v2(
317 AssistantSettingsContentV2 {
318 dock: Some(dock),
319 ..Default::default()
320 },
321 ))
322 }
323 }
324 }
325
    /// Records `language_model` as the default model in whichever settings
    /// format is currently stored.
    ///
    /// V1 settings store the model inside a provider-specific variant, so the
    /// variant is rebuilt while preserving any previously configured API URL
    /// (and, for OpenAI, the available-model list). Legacy settings can only
    /// represent OpenAI models. When no settings exist, a fresh V2 entry is
    /// created.
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    match provider.as_ref() {
                        "zed.dev" => {
                            // The V1 format has no way to express a zed.dev model.
                            log::warn!("attempted to set zed.dev model on outdated settings");
                        }
                        "anthropic" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Anthropic {
                                default_model: AnthropicModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        "ollama" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Ollama {
                                default_model: Some(ollama::Model::new(
                                    &model,
                                    None,
                                    None,
                                    Some(language_model.supports_tools()),
                                )),
                                api_url,
                            });
                        }
                        "lmstudio" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::LmStudio {
                                default_model: Some(lmstudio::Model::new(&model, None, None)),
                                api_url,
                            });
                        }
                        "openai" => {
                            // OpenAI also carries a user-defined model list; keep it.
                            let (api_url, available_models) = match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    available_models,
                                    ..
                                }) => (api_url.clone(), available_models.clone()),
                                _ => (None, None),
                            };
                            settings.provider = Some(AssistantProviderContentV1::OpenAi {
                                default_model: OpenAiModel::from_id(&model).ok(),
                                api_url,
                                available_models,
                            });
                        }
                        "deepseek" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                                default_model: DeepseekModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        _ => {}
                    }
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.default_model = Some(LanguageModelSelection {
                        provider: provider.into(),
                        model,
                    });
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                // Legacy settings are OpenAI-only; ignore models from other providers.
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
            None => {
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        default_model: Some(LanguageModelSelection {
                            provider: provider.into(),
                            model,
                        }),
                        ..Default::default()
                    },
                ));
            }
        }
    }
433
434 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
435 self.v2_setting(|setting| {
436 setting.inline_assistant_model = Some(LanguageModelSelection {
437 provider: provider.into(),
438 model,
439 });
440 Ok(())
441 })
442 .ok();
443 }
444
445 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
446 self.v2_setting(|setting| {
447 setting.commit_message_model = Some(LanguageModelSelection {
448 provider: provider.into(),
449 model,
450 });
451 Ok(())
452 })
453 .ok();
454 }
455
456 pub fn v2_setting(
457 &mut self,
458 f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
459 ) -> anyhow::Result<()> {
460 match self.inner.get_or_insert_with(|| {
461 AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
462 ..Default::default()
463 })
464 }) {
465 AssistantSettingsContentInner::Versioned(boxed) => {
466 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
467 f(settings)
468 } else {
469 Ok(())
470 }
471 }
472 _ => Ok(()),
473 }
474 }
475
476 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
477 self.v2_setting(|setting| {
478 setting.thread_summary_model = Some(LanguageModelSelection {
479 provider: provider.into(),
480 model,
481 });
482 Ok(())
483 })
484 .ok();
485 }
486
487 pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
488 self.v2_setting(|setting| {
489 setting.always_allow_tool_actions = Some(allow);
490 Ok(())
491 })
492 .ok();
493 }
494
495 pub fn set_single_file_review(&mut self, allow: bool) {
496 self.v2_setting(|setting| {
497 setting.single_file_review = Some(allow);
498 Ok(())
499 })
500 .ok();
501 }
502
503 pub fn set_profile(&mut self, profile_id: AgentProfileId) {
504 self.v2_setting(|setting| {
505 setting.default_profile = Some(profile_id);
506 Ok(())
507 })
508 .ok();
509 }
510
511 pub fn create_profile(
512 &mut self,
513 profile_id: AgentProfileId,
514 profile: AgentProfile,
515 ) -> Result<()> {
516 self.v2_setting(|settings| {
517 let profiles = settings.profiles.get_or_insert_default();
518 if profiles.contains_key(&profile_id) {
519 bail!("profile with ID '{profile_id}' already exists");
520 }
521
522 profiles.insert(
523 profile_id,
524 AgentProfileContent {
525 name: profile.name.into(),
526 tools: profile.tools,
527 enable_all_context_servers: Some(profile.enable_all_context_servers),
528 context_servers: profile
529 .context_servers
530 .into_iter()
531 .map(|(server_id, preset)| {
532 (
533 server_id,
534 ContextServerPresetContent {
535 tools: preset.tools,
536 },
537 )
538 })
539 .collect(),
540 },
541 );
542
543 Ok(())
544 })
545 }
546}
547
/// On-disk settings payload discriminated by its `"version"` field.
/// `version` is listed in `PRESERVED_KEYS` so it is always written back.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
#[schemars(deny_unknown_fields)]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
557
558impl Default for VersionedAssistantSettingsContent {
559 fn default() -> Self {
560 Self::V2(AssistantSettingsContentV2 {
561 enabled: None,
562 button: None,
563 dock: None,
564 default_width: None,
565 default_height: None,
566 default_model: None,
567 inline_assistant_model: None,
568 commit_message_model: None,
569 thread_summary_model: None,
570 inline_alternatives: None,
571 default_profile: None,
572 profiles: None,
573 always_allow_tool_actions: None,
574 notify_when_agent_waiting: None,
575 stream_edits: None,
576 single_file_review: None,
577 model_parameters: Vec::new(),
578 preferred_completion_mode: None,
579 enable_feedback: None,
580 })
581 }
582}
583
/// Assistant settings as persisted on disk, format version 2 (the current
/// format). All fields are optional so that user files may specify any subset.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
    /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
    ///
    /// Default: true
    single_file_review: Option<bool>,
    /// Additional parameters for language model requests. When making a request
    /// to a model, parameters will be taken from the last entry in this list
    /// that matches the model's provider and name. In each entry, both provider
    /// and model are optional, so that you can specify parameters for either
    /// one.
    ///
    /// Default: []
    #[serde(default)]
    model_parameters: Vec<LanguageModelParameters>,

    /// What completion mode to enable for new threads
    ///
    /// Default: normal
    preferred_completion_mode: Option<CompletionMode>,
    /// Whether to show thumb buttons for feedback in the agent panel.
    ///
    /// Default: true
    enable_feedback: Option<bool>,
}
659
/// Completion mode applied to newly created threads.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
    /// Standard completion behavior (the default).
    #[default]
    Normal,
    Max,
}
667
668impl From<CompletionMode> for zed_llm_client::CompletionMode {
669 fn from(value: CompletionMode) -> Self {
670 match value {
671 CompletionMode::Normal => zed_llm_client::CompletionMode::Normal,
672 CompletionMode::Max => zed_llm_client::CompletionMode::Max,
673 }
674 }
675}
676
/// A (provider, model) pair identifying a concrete language model choice.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    pub provider: LanguageModelProviderSetting,
    pub model: String,
}
682
/// Newtype over the provider name string (e.g. "openai", "zed.dev").
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelProviderSetting(pub String);
685
686impl JsonSchema for LanguageModelProviderSetting {
687 fn schema_name() -> String {
688 "LanguageModelProviderSetting".into()
689 }
690
691 fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
692 schemars::schema::SchemaObject {
693 enum_values: Some(vec![
694 "anthropic".into(),
695 "amazon-bedrock".into(),
696 "google".into(),
697 "lmstudio".into(),
698 "ollama".into(),
699 "openai".into(),
700 "zed.dev".into(),
701 "copilot_chat".into(),
702 "deepseek".into(),
703 ]),
704 ..Default::default()
705 }
706 .into()
707 }
708}
709
// Allow owned provider-name strings to convert directly into the newtype.
impl From<String> for LanguageModelProviderSetting {
    fn from(provider: String) -> Self {
        Self(provider)
    }
}
715
716impl From<&str> for LanguageModelProviderSetting {
717 fn from(provider: &str) -> Self {
718 Self(provider.to_string())
719 }
720}
721
722impl Default for LanguageModelSelection {
723 fn default() -> Self {
724 Self {
725 provider: LanguageModelProviderSetting("openai".to_string()),
726 model: "gpt-4".to_string(),
727 }
728 }
729}
730
/// On-disk representation of an agent profile.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    // Tool name -> enabled flag; absent tools default off.
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    // Per-context-server tool presets, keyed by server id.
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
741
/// On-disk tool-enablement preset for a single context server.
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}
746
/// Assistant settings payload for format version 1 (superseded by V2; kept
/// so existing user files can still be read and upgraded).
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev"
    /// each with their respective default models and configurations.
    provider: Option<AssistantProviderContentV1>,
}
776
/// Pre-versioning assistant settings from the OpenAI-only era; recognized so
/// old user files can still be read and upgraded.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}
805
impl Settings for AssistantSettings {
    // Settings live under "agent", falling back to the pre-rename "assistant" key.
    const KEY: Option<&'static str> = Some("agent");

    const FALLBACK_KEY: Option<&'static str> = Some("assistant");

    // Keep the "version" discriminant when settings are written back.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Builds the resolved settings by upgrading each source to V2 and
    /// merging in order, so later sources (user customizations) override
    /// earlier ones (defaults).
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Remember if ANY source used an outdated format so the UI can prompt.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            // Optional per-feature models: a later Some replaces, a later None preserves.
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.single_file_review, value.single_file_review);
            merge(&mut settings.default_profile, value.default_profile);
            merge(
                &mut settings.preferred_completion_mode,
                value.preferred_completion_mode,
            );
            merge(&mut settings.enable_feedback, value.enable_feedback);

            // Model parameters accumulate across sources rather than replace;
            // lookup later prefers the last matching entry.
            settings
                .model_parameters
                .extend_from_slice(&value.model_parameters);

            // Profiles merge by ID: later sources override same-ID profiles.
            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    /// Maps VS Code's `chat.agent.enabled` onto both `enabled` and `button`
    /// in whichever settings format is currently stored.
    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                // Legacy settings have no `enabled` field; only the button toggle exists.
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}
936
/// Overwrites `target` when `value` carries a replacement; `None` leaves
/// `target` unchanged.
fn merge<T>(target: &mut T, value: Option<T>) {
    match value {
        Some(replacement) => *target = replacement,
        None => {}
    }
}
942
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};
    use settings::SettingsStore;

    use super::*;

    // Verifies that freshly-written settings carry the V2 "version" tag and
    // round-trip through the settings file without reading as outdated.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // Defaults: current version, zed.dev Claude model.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                default_profile: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                                single_file_review: None,
                                enable_feedback: None,
                                model_parameters: Vec::new(),
                                preferred_completion_mode: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        // The written file must carry the preserved "version" key.
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            agent: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.agent.is_version_outdated());
    }

    // Verifies the "assistant" fallback key is read, and that subsequent
    // writes do not migrate the user's settings to the "agent" key.
    #[gpui::test]
    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let mut test_settings = settings::SettingsStore::test(cx);
            let user_settings_content = r#"{
                "assistant": {
                    "enabled": true,
                    "version": "2",
                    "default_model": {
                        "provider": "zed.dev",
                        "model": "gpt-99"
                    },
                }}"#;
            test_settings
                .set_user_settings(user_settings_content, cx)
                .unwrap();
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.run_until_parked();

        let assistant_settings = cx.update(|cx| AssistantSettings::get_global(cx).clone());
        assert!(assistant_settings.enabled);
        assert!(!assistant_settings.using_outdated_settings_version);
        assert_eq!(assistant_settings.default_model.model, "gpt-99");

        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
            settings_store.update_user_settings::<AssistantSettings>(cx, |settings| {
                *settings = AssistantSettingsContent {
                    inner: Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(false),
                            default_model: Some(LanguageModelSelection {
                                provider: "xai".to_owned().into(),
                                model: "grok".to_owned(),
                            }),
                            ..Default::default()
                        },
                    )),
                };
            });
        });

        cx.run_until_parked();

        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
            agent: Option<serde_json_lenient::Value>,
        }

        // The update must land under "assistant"; no "agent" key should appear.
        let assistant_settings: AssistantSettingsTest = serde_json::from_value(settings).unwrap();
        assert!(assistant_settings.agent.is_none());
    }
}