1mod agent_profile;
2
3use std::sync::Arc;
4
5use ::open_ai::Model as OpenAiModel;
6use anthropic::Model as AnthropicModel;
7use anyhow::{Result, bail};
8use collections::IndexMap;
9use deepseek::Model as DeepseekModel;
10use gpui::{App, Pixels, SharedString};
11use language_model::LanguageModel;
12use lmstudio::Model as LmStudioModel;
13use mistral::Model as MistralModel;
14use ollama::Model as OllamaModel;
15use schemars::{JsonSchema, schema::Schema};
16use serde::{Deserialize, Serialize};
17use settings::{Settings, SettingsSources};
18
19pub use crate::agent_profile::*;
20
/// Register [`AssistantSettings`] with the global settings store so they are
/// loaded from the settings file and kept up to date.
pub fn init(cx: &mut App) {
    AssistantSettings::register(cx);
}
24
/// Where the agent panel is docked in the workspace.
///
/// Serialized in snake_case (`"left"`, `"right"`, `"bottom"`).
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {
    Left,
    /// Default dock position.
    #[default]
    Right,
    Bottom,
}
33
/// Which view type the agent panel opens with.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum DefaultView {
    /// Default: the thread view.
    #[default]
    Thread,
    TextThread,
}
41
/// Where to show a notification when the agent is waiting for user input.
#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum NotifyWhenAgentWaiting {
    /// Default: notify on the primary screen only.
    #[default]
    PrimaryScreen,
    AllScreens,
    Never,
}
50
/// Provider configuration as stored by the V1 settings schema.
///
/// Serialized with an internal `"name"` tag (e.g. `{"name": "openai", ...}`).
/// Each variant carries the provider's default model and, where applicable,
/// a custom API URL. Only OpenAI additionally stores `available_models`.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
#[schemars(deny_unknown_fields)]
pub enum AssistantProviderContentV1 {
    #[serde(rename = "zed.dev")]
    ZedDotDev { default_model: Option<String> },
    #[serde(rename = "openai")]
    OpenAi {
        default_model: Option<OpenAiModel>,
        api_url: Option<String>,
        available_models: Option<Vec<OpenAiModel>>,
    },
    #[serde(rename = "anthropic")]
    Anthropic {
        default_model: Option<AnthropicModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "ollama")]
    Ollama {
        default_model: Option<OllamaModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "lmstudio")]
    LmStudio {
        default_model: Option<LmStudioModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "deepseek")]
    DeepSeek {
        default_model: Option<DeepseekModel>,
        api_url: Option<String>,
    },
    #[serde(rename = "mistral")]
    Mistral {
        default_model: Option<MistralModel>,
        api_url: Option<String>,
    },
}
89
/// Fully-resolved assistant settings, produced by merging the defaults with
/// every user customization (see [`Settings::load`] below).
#[derive(Default, Clone, Debug)]
pub struct AssistantSettings {
    pub enabled: bool,
    pub button: bool,
    pub dock: AssistantDockPosition,
    pub default_width: Pixels,
    pub default_height: Pixels,
    /// Model used when no feature-specific model is configured.
    pub default_model: LanguageModelSelection,
    pub inline_assistant_model: Option<LanguageModelSelection>,
    pub commit_message_model: Option<LanguageModelSelection>,
    pub thread_summary_model: Option<LanguageModelSelection>,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    /// True when any settings source still used the V1/legacy schema.
    pub using_outdated_settings_version: bool,
    pub default_profile: AgentProfileId,
    pub default_view: DefaultView,
    pub profiles: IndexMap<AgentProfileId, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: NotifyWhenAgentWaiting,
    pub play_sound_when_agent_done: bool,
    pub stream_edits: bool,
    pub single_file_review: bool,
    /// Accumulated across all sources; later entries take precedence
    /// (see [`AssistantSettings::temperature_for_model`]).
    pub model_parameters: Vec<LanguageModelParameters>,
    pub preferred_completion_mode: CompletionMode,
    pub enable_feedback: bool,
}
115
116impl AssistantSettings {
117 pub fn temperature_for_model(model: &Arc<dyn LanguageModel>, cx: &App) -> Option<f32> {
118 let settings = Self::get_global(cx);
119 settings
120 .model_parameters
121 .iter()
122 .rfind(|setting| setting.matches(model))
123 .and_then(|m| m.temperature)
124 }
125
126 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
127 self.inline_assistant_model = Some(LanguageModelSelection {
128 provider: provider.into(),
129 model,
130 });
131 }
132
133 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
134 self.commit_message_model = Some(LanguageModelSelection {
135 provider: provider.into(),
136 model,
137 });
138 }
139
140 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
141 self.thread_summary_model = Some(LanguageModelSelection {
142 provider: provider.into(),
143 model,
144 });
145 }
146}
147
/// One entry of the `model_parameters` setting.
///
/// `provider` and `model` are optional filters; an entry applies to a model
/// when every filter that is present matches (see
/// [`LanguageModelParameters::matches`]).
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelParameters {
    pub provider: Option<LanguageModelProviderSetting>,
    pub model: Option<SharedString>,
    pub temperature: Option<f32>,
}
154
155impl LanguageModelParameters {
156 pub fn matches(&self, model: &Arc<dyn LanguageModel>) -> bool {
157 if let Some(provider) = &self.provider {
158 if provider.0 != model.provider_id().0 {
159 return false;
160 }
161 }
162 if let Some(setting_model) = &self.model {
163 if *setting_model != model.id().0 {
164 return false;
165 }
166 }
167 true
168 }
169}
170
/// Assistant panel settings
///
/// The on-disk representation is flattened: the file may contain a versioned
/// settings object (`"version": "1"` or `"2"`), the pre-versioning legacy
/// shape, or nothing at all (`inner == None`).
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
pub struct AssistantSettingsContent {
    #[serde(flatten)]
    pub inner: Option<AssistantSettingsContentInner>,
}
177
/// The two shapes assistant settings can take on disk; `untagged` so serde
/// tries each variant in order when deserializing.
#[derive(Clone, Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum AssistantSettingsContentInner {
    /// Settings carrying an explicit `"version"` field.
    Versioned(Box<VersionedAssistantSettingsContent>),
    /// The original, unversioned settings shape.
    Legacy(LegacyAssistantSettingsContent),
}
184
185impl AssistantSettingsContentInner {
186 fn for_v2(content: AssistantSettingsContentV2) -> Self {
187 AssistantSettingsContentInner::Versioned(Box::new(VersionedAssistantSettingsContent::V2(
188 content,
189 )))
190 }
191}
192
// `AssistantSettingsContent` is a transparent wrapper (its `inner` field is
// `#[serde(flatten)]`ed), so its JSON schema is delegated wholesale to
// `VersionedAssistantSettingsContent`.
impl JsonSchema for AssistantSettingsContent {
    fn schema_name() -> String {
        VersionedAssistantSettingsContent::schema_name()
    }

    fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
        VersionedAssistantSettingsContent::json_schema(r#gen)
    }

    fn is_referenceable() -> bool {
        VersionedAssistantSettingsContent::is_referenceable()
    }
}
206
207impl AssistantSettingsContent {
208 pub fn is_version_outdated(&self) -> bool {
209 match &self.inner {
210 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
211 VersionedAssistantSettingsContent::V1(_) => true,
212 VersionedAssistantSettingsContent::V2(_) => false,
213 },
214 Some(AssistantSettingsContentInner::Legacy(_)) => true,
215 None => false,
216 }
217 }
218
219 fn upgrade(&self) -> AssistantSettingsContentV2 {
220 match &self.inner {
221 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
222 VersionedAssistantSettingsContent::V1(ref settings) => AssistantSettingsContentV2 {
223 enabled: settings.enabled,
224 button: settings.button,
225 dock: settings.dock,
226 default_width: settings.default_width,
227 default_height: settings.default_width,
228 default_model: settings
229 .provider
230 .clone()
231 .and_then(|provider| match provider {
232 AssistantProviderContentV1::ZedDotDev { default_model } => {
233 default_model.map(|model| LanguageModelSelection {
234 provider: "zed.dev".into(),
235 model,
236 })
237 }
238 AssistantProviderContentV1::OpenAi { default_model, .. } => {
239 default_model.map(|model| LanguageModelSelection {
240 provider: "openai".into(),
241 model: model.id().to_string(),
242 })
243 }
244 AssistantProviderContentV1::Anthropic { default_model, .. } => {
245 default_model.map(|model| LanguageModelSelection {
246 provider: "anthropic".into(),
247 model: model.id().to_string(),
248 })
249 }
250 AssistantProviderContentV1::Ollama { default_model, .. } => {
251 default_model.map(|model| LanguageModelSelection {
252 provider: "ollama".into(),
253 model: model.id().to_string(),
254 })
255 }
256 AssistantProviderContentV1::LmStudio { default_model, .. } => {
257 default_model.map(|model| LanguageModelSelection {
258 provider: "lmstudio".into(),
259 model: model.id().to_string(),
260 })
261 }
262 AssistantProviderContentV1::DeepSeek { default_model, .. } => {
263 default_model.map(|model| LanguageModelSelection {
264 provider: "deepseek".into(),
265 model: model.id().to_string(),
266 })
267 }
268 AssistantProviderContentV1::Mistral { default_model, .. } => {
269 default_model.map(|model| LanguageModelSelection {
270 provider: "mistral".into(),
271 model: model.id().to_string(),
272 })
273 }
274 }),
275 inline_assistant_model: None,
276 commit_message_model: None,
277 thread_summary_model: None,
278 inline_alternatives: None,
279 default_profile: None,
280 default_view: None,
281 profiles: None,
282 always_allow_tool_actions: None,
283 notify_when_agent_waiting: None,
284 stream_edits: None,
285 single_file_review: None,
286 model_parameters: Vec::new(),
287 preferred_completion_mode: None,
288 enable_feedback: None,
289 play_sound_when_agent_done: None,
290 },
291 VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
292 },
293 Some(AssistantSettingsContentInner::Legacy(settings)) => AssistantSettingsContentV2 {
294 enabled: None,
295 button: settings.button,
296 dock: settings.dock,
297 default_width: settings.default_width,
298 default_height: settings.default_height,
299 default_model: Some(LanguageModelSelection {
300 provider: "openai".into(),
301 model: settings
302 .default_open_ai_model
303 .clone()
304 .unwrap_or_default()
305 .id()
306 .to_string(),
307 }),
308 inline_assistant_model: None,
309 commit_message_model: None,
310 thread_summary_model: None,
311 inline_alternatives: None,
312 default_profile: None,
313 default_view: None,
314 profiles: None,
315 always_allow_tool_actions: None,
316 notify_when_agent_waiting: None,
317 stream_edits: None,
318 single_file_review: None,
319 model_parameters: Vec::new(),
320 preferred_completion_mode: None,
321 enable_feedback: None,
322 play_sound_when_agent_done: None,
323 },
324 None => AssistantSettingsContentV2::default(),
325 }
326 }
327
328 pub fn set_dock(&mut self, dock: AssistantDockPosition) {
329 match &mut self.inner {
330 Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
331 VersionedAssistantSettingsContent::V1(ref mut settings) => {
332 settings.dock = Some(dock);
333 }
334 VersionedAssistantSettingsContent::V2(ref mut settings) => {
335 settings.dock = Some(dock);
336 }
337 },
338 Some(AssistantSettingsContentInner::Legacy(settings)) => {
339 settings.dock = Some(dock);
340 }
341 None => {
342 self.inner = Some(AssistantSettingsContentInner::for_v2(
343 AssistantSettingsContentV2 {
344 dock: Some(dock),
345 ..Default::default()
346 },
347 ))
348 }
349 }
350 }
351
    /// Persist `language_model` as the default model, writing it in whatever
    /// settings schema (V1, V2, or legacy) is currently on disk.
    pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
        let model = language_model.id().0.to_string();
        let provider = language_model.provider_id().0.to_string();

        match &mut self.inner {
            Some(AssistantSettingsContentInner::Versioned(settings)) => match **settings {
                VersionedAssistantSettingsContent::V1(ref mut settings) => {
                    // V1 stores the model inside a per-provider enum, so each
                    // provider is handled separately, preserving any existing
                    // api_url (and, for OpenAI, available_models).
                    // NOTE(review): "mistral" has no arm here even though
                    // `AssistantProviderContentV1::Mistral` exists — selecting
                    // a Mistral model on V1 settings is silently ignored by
                    // the `_ => {}` fallthrough; confirm whether intentional.
                    match provider.as_ref() {
                        "zed.dev" => {
                            log::warn!("attempted to set zed.dev model on outdated settings");
                        }
                        "anthropic" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Anthropic {
                                default_model: AnthropicModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        "ollama" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::Ollama {
                                // The two `None`s are presumably optional model
                                // metadata (e.g. display name / max tokens) —
                                // TODO confirm against `ollama::Model::new`.
                                default_model: Some(ollama::Model::new(
                                    &model,
                                    None,
                                    None,
                                    Some(language_model.supports_tools()),
                                )),
                                api_url,
                            });
                        }
                        "lmstudio" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::LmStudio { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::LmStudio {
                                default_model: Some(lmstudio::Model::new(
                                    &model, None, None, false,
                                )),
                                api_url,
                            });
                        }
                        "openai" => {
                            // OpenAI additionally carries a user-supplied list
                            // of available models; keep it across updates.
                            let (api_url, available_models) = match &settings.provider {
                                Some(AssistantProviderContentV1::OpenAi {
                                    api_url,
                                    available_models,
                                    ..
                                }) => (api_url.clone(), available_models.clone()),
                                _ => (None, None),
                            };
                            settings.provider = Some(AssistantProviderContentV1::OpenAi {
                                default_model: OpenAiModel::from_id(&model).ok(),
                                api_url,
                                available_models,
                            });
                        }
                        "deepseek" => {
                            let api_url = match &settings.provider {
                                Some(AssistantProviderContentV1::DeepSeek { api_url, .. }) => {
                                    api_url.clone()
                                }
                                _ => None,
                            };
                            settings.provider = Some(AssistantProviderContentV1::DeepSeek {
                                default_model: DeepseekModel::from_id(&model).ok(),
                                api_url,
                            });
                        }
                        _ => {}
                    }
                }
                VersionedAssistantSettingsContent::V2(ref mut settings) => {
                    settings.default_model = Some(LanguageModelSelection {
                        provider: provider.into(),
                        model,
                    });
                }
            },
            Some(AssistantSettingsContentInner::Legacy(settings)) => {
                // Legacy settings only know about OpenAI models; anything else
                // is dropped on the floor.
                if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
                    settings.default_open_ai_model = Some(model);
                }
            }
            None => {
                // No settings yet: start a fresh V2 document with just the model.
                self.inner = Some(AssistantSettingsContentInner::for_v2(
                    AssistantSettingsContentV2 {
                        default_model: Some(LanguageModelSelection {
                            provider: provider.into(),
                            model,
                        }),
                        ..Default::default()
                    },
                ));
            }
        }
    }
461
462 pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
463 self.v2_setting(|setting| {
464 setting.inline_assistant_model = Some(LanguageModelSelection {
465 provider: provider.into(),
466 model,
467 });
468 Ok(())
469 })
470 .ok();
471 }
472
473 pub fn set_commit_message_model(&mut self, provider: String, model: String) {
474 self.v2_setting(|setting| {
475 setting.commit_message_model = Some(LanguageModelSelection {
476 provider: provider.into(),
477 model,
478 });
479 Ok(())
480 })
481 .ok();
482 }
483
484 pub fn v2_setting(
485 &mut self,
486 f: impl FnOnce(&mut AssistantSettingsContentV2) -> anyhow::Result<()>,
487 ) -> anyhow::Result<()> {
488 match self.inner.get_or_insert_with(|| {
489 AssistantSettingsContentInner::for_v2(AssistantSettingsContentV2 {
490 ..Default::default()
491 })
492 }) {
493 AssistantSettingsContentInner::Versioned(boxed) => {
494 if let VersionedAssistantSettingsContent::V2(ref mut settings) = **boxed {
495 f(settings)
496 } else {
497 Ok(())
498 }
499 }
500 _ => Ok(()),
501 }
502 }
503
504 pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
505 self.v2_setting(|setting| {
506 setting.thread_summary_model = Some(LanguageModelSelection {
507 provider: provider.into(),
508 model,
509 });
510 Ok(())
511 })
512 .ok();
513 }
514
515 pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
516 self.v2_setting(|setting| {
517 setting.always_allow_tool_actions = Some(allow);
518 Ok(())
519 })
520 .ok();
521 }
522
523 pub fn set_play_sound_when_agent_done(&mut self, allow: bool) {
524 self.v2_setting(|setting| {
525 setting.play_sound_when_agent_done = Some(allow);
526 Ok(())
527 })
528 .ok();
529 }
530
531 pub fn set_single_file_review(&mut self, allow: bool) {
532 self.v2_setting(|setting| {
533 setting.single_file_review = Some(allow);
534 Ok(())
535 })
536 .ok();
537 }
538
539 pub fn set_profile(&mut self, profile_id: AgentProfileId) {
540 self.v2_setting(|setting| {
541 setting.default_profile = Some(profile_id);
542 Ok(())
543 })
544 .ok();
545 }
546
547 pub fn create_profile(
548 &mut self,
549 profile_id: AgentProfileId,
550 profile: AgentProfile,
551 ) -> Result<()> {
552 self.v2_setting(|settings| {
553 let profiles = settings.profiles.get_or_insert_default();
554 if profiles.contains_key(&profile_id) {
555 bail!("profile with ID '{profile_id}' already exists");
556 }
557
558 profiles.insert(
559 profile_id,
560 AgentProfileContent {
561 name: profile.name.into(),
562 tools: profile.tools,
563 enable_all_context_servers: Some(profile.enable_all_context_servers),
564 context_servers: profile
565 .context_servers
566 .into_iter()
567 .map(|(server_id, preset)| {
568 (
569 server_id,
570 ContextServerPresetContent {
571 tools: preset.tools,
572 },
573 )
574 })
575 .collect(),
576 },
577 );
578
579 Ok(())
580 })
581 }
582}
583
/// Assistant settings with an explicit schema version, serialized with a
/// `"version"` tag (`"1"` or `"2"`).
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
#[schemars(deny_unknown_fields)]
pub enum VersionedAssistantSettingsContent {
    #[serde(rename = "1")]
    V1(AssistantSettingsContentV1),
    #[serde(rename = "2")]
    V2(AssistantSettingsContentV2),
}
593
594impl Default for VersionedAssistantSettingsContent {
595 fn default() -> Self {
596 Self::V2(AssistantSettingsContentV2 {
597 enabled: None,
598 button: None,
599 dock: None,
600 default_width: None,
601 default_height: None,
602 default_model: None,
603 inline_assistant_model: None,
604 commit_message_model: None,
605 thread_summary_model: None,
606 inline_alternatives: None,
607 default_profile: None,
608 default_view: None,
609 profiles: None,
610 always_allow_tool_actions: None,
611 notify_when_agent_waiting: None,
612 stream_edits: None,
613 single_file_review: None,
614 model_parameters: Vec::new(),
615 preferred_completion_mode: None,
616 enable_feedback: None,
617 play_sound_when_agent_done: None,
618 })
619 }
620}
621
/// Agent settings file content, version 2 — the current schema.
///
/// Every field is optional so that user files can override the defaults
/// sparsely; merging happens in [`Settings::load`].
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV2 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the agent panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the agent panel.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the agent panel is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the agent panel is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The default model to use when creating new chats and for other features when a specific model is not specified.
    default_model: Option<LanguageModelSelection>,
    /// Model to use for the inline assistant. Defaults to default_model when not specified.
    inline_assistant_model: Option<LanguageModelSelection>,
    /// Model to use for generating git commit messages. Defaults to default_model when not specified.
    commit_message_model: Option<LanguageModelSelection>,
    /// Model to use for generating thread summaries. Defaults to default_model when not specified.
    thread_summary_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// The default profile to use in the Agent.
    ///
    /// Default: write
    default_profile: Option<AgentProfileId>,
    /// Which view type to show by default in the agent panel.
    ///
    /// Default: "thread"
    default_view: Option<DefaultView>,
    /// The available agent profiles.
    pub profiles: Option<IndexMap<AgentProfileId, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Where to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: "primary_screen"
    notify_when_agent_waiting: Option<NotifyWhenAgentWaiting>,
    /// Whether to play a sound when the agent has either completed its response, or needs user input.
    ///
    /// Default: false
    play_sound_when_agent_done: Option<bool>,
    /// Whether to stream edits from the agent as they are received.
    ///
    /// Default: false
    stream_edits: Option<bool>,
    /// Whether to display agent edits in single-file editors in addition to the review multibuffer pane.
    ///
    /// Default: true
    single_file_review: Option<bool>,
    /// Additional parameters for language model requests. When making a request
    /// to a model, parameters will be taken from the last entry in this list
    /// that matches the model's provider and name. In each entry, both provider
    /// and model are optional, so that you can specify parameters for either
    /// one.
    ///
    /// Default: []
    #[serde(default)]
    model_parameters: Vec<LanguageModelParameters>,
    /// What completion mode to enable for new threads
    ///
    /// Default: normal
    preferred_completion_mode: Option<CompletionMode>,
    /// Whether to show thumb buttons for feedback in the agent panel.
    ///
    /// Default: true
    enable_feedback: Option<bool>,
}
704
/// Completion mode used for new threads.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
    /// Default mode.
    #[default]
    Normal,
    Max,
}
712
713impl From<CompletionMode> for zed_llm_client::CompletionMode {
714 fn from(value: CompletionMode) -> Self {
715 match value {
716 CompletionMode::Normal => zed_llm_client::CompletionMode::Normal,
717 CompletionMode::Max => zed_llm_client::CompletionMode::Max,
718 }
719 }
720}
721
/// A (provider, model) pair identifying a concrete language model choice.
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
pub struct LanguageModelSelection {
    pub provider: LanguageModelProviderSetting,
    pub model: String,
}
727
/// Newtype over a provider identifier string (e.g. "openai", "zed.dev").
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelProviderSetting(pub String);
730
731impl JsonSchema for LanguageModelProviderSetting {
732 fn schema_name() -> String {
733 "LanguageModelProviderSetting".into()
734 }
735
736 fn json_schema(_: &mut schemars::r#gen::SchemaGenerator) -> Schema {
737 schemars::schema::SchemaObject {
738 enum_values: Some(vec![
739 "anthropic".into(),
740 "amazon-bedrock".into(),
741 "google".into(),
742 "lmstudio".into(),
743 "ollama".into(),
744 "openai".into(),
745 "zed.dev".into(),
746 "copilot_chat".into(),
747 "deepseek".into(),
748 "mistral".into(),
749 ]),
750 ..Default::default()
751 }
752 .into()
753 }
754}
755
/// Allow passing an owned provider id string directly.
impl From<String> for LanguageModelProviderSetting {
    fn from(provider: String) -> Self {
        Self(provider)
    }
}
761
/// Allow passing a provider id literal (e.g. `"openai".into()`).
impl From<&str> for LanguageModelProviderSetting {
    fn from(provider: &str) -> Self {
        Self(provider.to_string())
    }
}
767
768impl Default for LanguageModelSelection {
769 fn default() -> Self {
770 Self {
771 provider: LanguageModelProviderSetting("openai".to_string()),
772 model: "gpt-4".to_string(),
773 }
774 }
775}
776
/// Serialized form of an agent profile as stored in the settings file.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    /// Per-tool enabled flags; missing in the file means empty.
    #[serde(default)]
    pub tools: IndexMap<Arc<str>, bool>,
    /// Whether all context servers are enabled by default.
    pub enable_all_context_servers: Option<bool>,
    /// Per-context-server tool presets, keyed by server id.
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}
787
/// Serialized per-context-server preset: which of its tools are enabled.
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}
792
/// Assistant settings file content, version 1 (superseded by
/// [`AssistantSettingsContentV2`]; upgraded on load).
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.
    ///
    /// Default: true
    enabled: Option<bool>,
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    default_height: Option<f32>,
    /// The provider of the assistant service.
    ///
    /// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek",
    /// "mistral", "zed.dev" — each with their respective default models and
    /// configurations.
    provider: Option<AssistantProviderContentV1>,
}
822
/// The original, pre-versioning assistant settings shape (OpenAI-only);
/// upgraded to V2 on load.
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct LegacyAssistantSettingsContent {
    /// Whether to show the assistant panel button in the status bar.
    ///
    /// Default: true
    pub button: Option<bool>,
    /// Where to dock the assistant.
    ///
    /// Default: right
    pub dock: Option<AssistantDockPosition>,
    /// Default width in pixels when the assistant is docked to the left or right.
    ///
    /// Default: 640
    pub default_width: Option<f32>,
    /// Default height in pixels when the assistant is docked to the bottom.
    ///
    /// Default: 320
    pub default_height: Option<f32>,
    /// The default OpenAI model to use when creating new chats.
    ///
    /// Default: gpt-4-1106-preview
    pub default_open_ai_model: Option<OpenAiModel>,
    /// OpenAI API base URL to use when creating new chats.
    ///
    /// Default: <https://api.openai.com/v1>
    pub openai_api_url: Option<String>,
}
851
impl Settings for AssistantSettings {
    /// Settings live under the `"agent"` key in the settings file.
    const KEY: Option<&'static str> = Some("agent");

    /// Older installs used `"assistant"`; it is consulted when `"agent"` is absent.
    const FALLBACK_KEY: Option<&'static str> = Some("assistant");

    /// Keep the `"version"` field when rewriting the settings file so the
    /// schema tag is never dropped.
    const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);

    type FileContent = AssistantSettingsContent;

    /// Build the resolved settings by layering each source (defaults first,
    /// then user customizations) on top of the previous one. Every source is
    /// upgraded to V2 before merging; later sources win field-by-field, while
    /// `model_parameters` accumulate and `profiles` extend by id.
    fn load(
        sources: SettingsSources<Self::FileContent>,
        _: &mut gpui::App,
    ) -> anyhow::Result<Self> {
        let mut settings = AssistantSettings::default();

        for value in sources.defaults_and_customizations() {
            // Remember whether *any* source still used an outdated schema so
            // the UI can prompt for migration.
            if value.is_version_outdated() {
                settings.using_outdated_settings_version = true;
            }

            let value = value.upgrade();
            merge(&mut settings.enabled, value.enabled);
            merge(&mut settings.button, value.button);
            merge(&mut settings.dock, value.dock);
            merge(
                &mut settings.default_width,
                value.default_width.map(Into::into),
            );
            merge(
                &mut settings.default_height,
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            // Optional model overrides: a later source's value replaces the
            // accumulated one; otherwise the previous value is kept.
            settings.inline_assistant_model = value
                .inline_assistant_model
                .or(settings.inline_assistant_model.take());
            settings.commit_message_model = value
                .commit_message_model
                .or(settings.commit_message_model.take());
            settings.thread_summary_model = value
                .thread_summary_model
                .or(settings.thread_summary_model.take());
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.always_allow_tool_actions,
                value.always_allow_tool_actions,
            );
            merge(
                &mut settings.notify_when_agent_waiting,
                value.notify_when_agent_waiting,
            );
            merge(
                &mut settings.play_sound_when_agent_done,
                value.play_sound_when_agent_done,
            );
            merge(&mut settings.stream_edits, value.stream_edits);
            merge(&mut settings.single_file_review, value.single_file_review);
            merge(&mut settings.default_profile, value.default_profile);
            merge(&mut settings.default_view, value.default_view);
            merge(
                &mut settings.preferred_completion_mode,
                value.preferred_completion_mode,
            );
            merge(&mut settings.enable_feedback, value.enable_feedback);

            // Parameters accumulate across sources; lookup scans from the
            // back, so later sources take precedence.
            settings
                .model_parameters
                .extend_from_slice(&value.model_parameters);

            if let Some(profiles) = value.profiles {
                settings
                    .profiles
                    .extend(profiles.into_iter().map(|(id, profile)| {
                        (
                            id,
                            AgentProfile {
                                name: profile.name.into(),
                                tools: profile.tools,
                                enable_all_context_servers: profile
                                    .enable_all_context_servers
                                    .unwrap_or_default(),
                                context_servers: profile
                                    .context_servers
                                    .into_iter()
                                    .map(|(context_server_id, preset)| {
                                        (
                                            context_server_id,
                                            ContextServerPreset {
                                                tools: preset.tools.clone(),
                                            },
                                        )
                                    })
                                    .collect(),
                            },
                        )
                    }));
            }
        }

        Ok(settings)
    }

    /// Map VS Code's `chat.agent.enabled` onto our enabled/button flags,
    /// writing into whichever settings schema is present (creating V2
    /// settings when none exist). Legacy settings have no `enabled` field,
    /// so only `button` is set there.
    fn import_from_vscode(vscode: &settings::VsCodeSettings, current: &mut Self::FileContent) {
        if let Some(b) = vscode
            .read_value("chat.agent.enabled")
            .and_then(|b| b.as_bool())
        {
            match &mut current.inner {
                Some(AssistantSettingsContentInner::Versioned(versioned)) => {
                    match versioned.as_mut() {
                        VersionedAssistantSettingsContent::V1(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }

                        VersionedAssistantSettingsContent::V2(setting) => {
                            setting.enabled = Some(b);
                            setting.button = Some(b);
                        }
                    }
                }
                Some(AssistantSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
                None => {
                    current.inner = Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(b),
                            button: Some(b),
                            ..Default::default()
                        },
                    ));
                }
            }
        }
    }
}
987
/// Overwrite `target` with `value` when a value is present; otherwise leave
/// `target` untouched.
fn merge<T>(target: &mut T, value: Option<T>) {
    let Some(value) = value else { return };
    *target = value;
}
993
#[cfg(test)]
mod tests {
    use fs::Fs;
    use gpui::{ReadGlobal, TestAppContext};
    use settings::SettingsStore;

    use super::*;

    /// Writing V2 settings through the settings store should persist the
    /// `"version": "2"` tag and round-trip as a non-outdated document.
    #[gpui::test]
    async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let test_settings = settings::SettingsStore::test(cx);
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        // Baseline: the test defaults are V2 and resolve to the zed.dev model.
        cx.update(|cx| {
            assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
            assert_eq!(
                AssistantSettings::get_global(cx).default_model,
                LanguageModelSelection {
                    provider: "zed.dev".into(),
                    model: "claude-3-7-sonnet-latest".into(),
                }
            );
        });

        // Overwrite the settings file with explicit V2 content.
        cx.update(|cx| {
            settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
                fs.clone(),
                |settings, _| {
                    *settings = AssistantSettingsContent {
                        inner: Some(AssistantSettingsContentInner::for_v2(
                            AssistantSettingsContentV2 {
                                default_model: Some(LanguageModelSelection {
                                    provider: "test-provider".into(),
                                    model: "gpt-99".into(),
                                }),
                                inline_assistant_model: None,
                                commit_message_model: None,
                                thread_summary_model: None,
                                inline_alternatives: None,
                                enabled: None,
                                button: None,
                                dock: None,
                                default_width: None,
                                default_height: None,
                                default_profile: None,
                                default_view: None,
                                profiles: None,
                                always_allow_tool_actions: None,
                                play_sound_when_agent_done: None,
                                notify_when_agent_waiting: None,
                                stream_edits: None,
                                single_file_review: None,
                                enable_feedback: None,
                                model_parameters: Vec::new(),
                                preferred_completion_mode: None,
                            },
                        )),
                    }
                },
            );
        });

        cx.run_until_parked();

        // The persisted file must carry the version tag (PRESERVED_KEYS).
        let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
        assert!(raw_settings_value.contains(r#""version": "2""#));

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            agent: AssistantSettingsContent,
        }

        let assistant_settings: AssistantSettingsTest =
            serde_json_lenient::from_str(&raw_settings_value).unwrap();

        assert!(!assistant_settings.agent.is_version_outdated());
    }

    /// Settings written under the old `"assistant"` key must still load
    /// (FALLBACK_KEY), and programmatic updates must not create a duplicate
    /// `"agent"` entry alongside it.
    #[gpui::test]
    async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
        let fs = fs::FakeFs::new(cx.executor().clone());
        fs.create_dir(paths::settings_file().parent().unwrap())
            .await
            .unwrap();

        cx.update(|cx| {
            let mut test_settings = settings::SettingsStore::test(cx);
            let user_settings_content = r#"{
                "assistant": {
                    "enabled": true,
                    "version": "2",
                    "default_model": {
                        "provider": "zed.dev",
                        "model": "gpt-99"
                    },
                }}"#;
            test_settings
                .set_user_settings(user_settings_content, cx)
                .unwrap();
            cx.set_global(test_settings);
            AssistantSettings::register(cx);
        });

        cx.run_until_parked();

        let assistant_settings = cx.update(|cx| AssistantSettings::get_global(cx).clone());
        assert!(assistant_settings.enabled);
        assert!(!assistant_settings.using_outdated_settings_version);
        assert_eq!(assistant_settings.default_model.model, "gpt-99");

        cx.update_global::<SettingsStore, _>(|settings_store, cx| {
            settings_store.update_user_settings::<AssistantSettings>(cx, |settings| {
                *settings = AssistantSettingsContent {
                    inner: Some(AssistantSettingsContentInner::for_v2(
                        AssistantSettingsContentV2 {
                            enabled: Some(false),
                            default_model: Some(LanguageModelSelection {
                                provider: "xai".to_owned().into(),
                                model: "grok".to_owned(),
                            }),
                            ..Default::default()
                        },
                    )),
                };
            });
        });

        cx.run_until_parked();

        let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());

        #[derive(Debug, Deserialize)]
        struct AssistantSettingsTest {
            assistant: AssistantSettingsContent,
            agent: Option<serde_json_lenient::Value>,
        }

        // The update must land under "assistant"; no "agent" key may appear.
        let assistant_settings: AssistantSettingsTest = serde_json::from_value(settings).unwrap();
        assert!(assistant_settings.agent.is_none());
    }
}