From a2f4adbcff1d0e4bfc582350c246a2a496806c04 Mon Sep 17 00:00:00 2001 From: Xin Zhao Date: Thu, 2 Apr 2026 14:43:16 +0800 Subject: [PATCH] editor: Support disabling semantic token highlighting via empty rules (#52963) Self-Review Checklist: - [x] I've reviewed my own diff for quality, security, and reliability - [x] Unsafe blocks (if any) have justifying comments - [x] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) - [x] Tests cover the new/changed behavior - [x] Performance impact has been considered and is acceptable Closes #52882, should help #52723 According to the [Zed documentation](https://zed.dev/docs/semantic-tokens#example-disabling-a-token-type), users should be able to disable semantic highlighting for a specific token type by adding an empty rule in `settings.json`. However, the current implementation fails to respect this because its merging logic allows lower-priority default styles to "leak through" even when a matching high-priority rule is empty. This makes it impossible to selectively disable semantic tokens to reveal the underlying Tree-sitter highlighting when using "semantic_tokens": "combined". This is particularly problematic for extensions that provide specialized Tree-sitter queries, which are currently being obscured by less desirable semantic tokens (#52723). This PR fixes the logic to ensure that a completely empty high-priority rule acts as an opaque override, correctly disabling semantic styling for that token type and allowing Tree-sitter highlighting to show through as intended. Release Notes: - Fixed a bug where semantic token highlighting could not be disabled via empty rules in `settings.json`. 
--- crates/editor/src/semantic_tokens.rs | 278 ++++++++++++++++++++++++- crates/settings_content/src/project.rs | 12 ++ 2 files changed, 283 insertions(+), 7 deletions(-) diff --git a/crates/editor/src/semantic_tokens.rs b/crates/editor/src/semantic_tokens.rs index 8408438f17533098f906c75bcc03983edfb7acf8..9db95d50c32ae88eead00979ea01e1bfee392f3d 100644 --- a/crates/editor/src/semantic_tokens.rs +++ b/crates/editor/src/semantic_tokens.rs @@ -365,11 +365,20 @@ fn convert_token( modifiers: u32, ) -> Option { let rules = stylizer.rules_for_token(token_type)?; - let matching = rules.iter().filter(|rule| { - rule.token_modifiers - .iter() - .all(|m| stylizer.has_modifier(modifiers, m)) - }); + let matching: Vec<_> = rules + .iter() + .filter(|rule| { + rule.token_modifiers + .iter() + .all(|m| stylizer.has_modifier(modifiers, m)) + }) + .collect(); + + if let Some(rule) = matching.last() { + if rule.no_style_defined() { + return None; + } + } let mut highlight = HighlightStyle::default(); let mut empty = true; @@ -463,7 +472,9 @@ mod tests { }; use futures::StreamExt as _; - use gpui::{AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext}; + use gpui::{ + AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, UpdateGlobal as _, + }; use language::{Language, LanguageConfig, LanguageMatcher}; use languages::FakeLspAdapter; use multi_buffer::{ @@ -472,7 +483,10 @@ mod tests { use project::Project; use rope::Point; use serde_json::json; - use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore}; + use settings::{ + GlobalLspSettingsContent, LanguageSettingsContent, SemanticTokenRule, SemanticTokenRules, + SemanticTokens, SettingsStore, + }; use workspace::{MultiWorkspace, WorkspaceHandle as _}; use crate::{ @@ -1816,6 +1830,256 @@ mod tests { ); } + #[gpui::test] + async fn test_semantic_token_disabling_with_empty_rule(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + 
update_test_language_settings(cx, &|s| { + s.languages.0.insert( + "Rust".into(), + LanguageSettingsContent { + semantic_tokens: Some(SemanticTokens::Full), + ..Default::default() + }, + ); + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + semantic_tokens_provider: Some( + lsp::SemanticTokensServerCapabilities::SemanticTokensOptions( + lsp::SemanticTokensOptions { + legend: lsp::SemanticTokensLegend { + token_types: vec!["function".into()], + token_modifiers: vec![], + }, + full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }), + ..Default::default() + }, + ), + ), + ..Default::default() + }, + cx, + ) + .await; + + let mut full_request = cx + .set_request_handler::( + move |_, _, _| async move { + Ok(Some(lsp::SemanticTokensResult::Tokens( + lsp::SemanticTokens { + data: vec![0, 3, 4, 0, 0], + result_id: None, + }, + ))) + }, + ); + + // Verify it highlights by default + cx.set_state("ˇfn main() {}"); + full_request.next().await; + cx.run_until_parked(); + assert_eq!(extract_semantic_highlights(&cx.editor, &cx).len(), 1); + + // Apply EMPTY rule to disable it + cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.global_lsp_settings = Some(GlobalLspSettingsContent { + semantic_token_rules: Some(SemanticTokenRules { + rules: vec![SemanticTokenRule { + token_type: Some("function".to_string()), + ..Default::default() + }], + }), + ..Default::default() + }); + }); + }); + }); + + cx.set_state("ˇfn main() { }"); + full_request.next().await; + cx.run_until_parked(); + + assert!( + extract_semantic_highlights(&cx.editor, &cx).is_empty(), + "Highlighting should be disabled by empty style setting" + ); + } + + #[gpui::test] + async fn test_semantic_token_broad_rule_disables_specific_token(cx: &mut TestAppContext) { + init_test(cx, |_| {}); + update_test_language_settings(cx, &|s| { + s.languages.0.insert( + "Rust".into(), + LanguageSettingsContent { + 
semantic_tokens: Some(SemanticTokens::Full), + ..Default::default() + }, + ); + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + semantic_tokens_provider: Some( + lsp::SemanticTokensServerCapabilities::SemanticTokensOptions( + lsp::SemanticTokensOptions { + legend: lsp::SemanticTokensLegend { + token_types: vec!["comment".into()], + token_modifiers: vec!["documentation".into()], + }, + full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }), + ..Default::default() + }, + ), + ), + ..Default::default() + }, + cx, + ) + .await; + + let mut full_request = cx + .set_request_handler::( + move |_, _, _| async move { + Ok(Some(lsp::SemanticTokensResult::Tokens( + lsp::SemanticTokens { + data: vec![0, 0, 5, 0, 1], // comment [documentation] + result_id: None, + }, + ))) + }, + ); + + cx.set_state("ˇ/// d\n"); + full_request.next().await; + cx.run_until_parked(); + assert_eq!( + extract_semantic_highlights(&cx.editor, &cx).len(), + 1, + "Documentation comment should be highlighted" + ); + + // Apply a BROAD empty rule for "comment" (no modifiers) + cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.global_lsp_settings = Some(GlobalLspSettingsContent { + semantic_token_rules: Some(SemanticTokenRules { + rules: vec![SemanticTokenRule { + token_type: Some("comment".to_string()), + ..Default::default() + }], + }), + ..Default::default() + }); + }); + }); + }); + + cx.set_state("ˇ/// d\n"); + full_request.next().await; + cx.run_until_parked(); + + assert!( + extract_semantic_highlights(&cx.editor, &cx).is_empty(), + "Broad empty rule should disable specific documentation comment" + ); + } + + #[gpui::test] + async fn test_semantic_token_specific_rule_does_not_disable_broad_token( + cx: &mut TestAppContext, + ) { + use gpui::UpdateGlobal as _; + use settings::{GlobalLspSettingsContent, SemanticTokenRule}; + + init_test(cx, |_| {}); + 
update_test_language_settings(cx, &|s| { + s.languages.0.insert( + "Rust".into(), + LanguageSettingsContent { + semantic_tokens: Some(SemanticTokens::Full), + ..Default::default() + }, + ); + }); + + let mut cx = EditorLspTestContext::new_rust( + lsp::ServerCapabilities { + semantic_tokens_provider: Some( + lsp::SemanticTokensServerCapabilities::SemanticTokensOptions( + lsp::SemanticTokensOptions { + legend: lsp::SemanticTokensLegend { + token_types: vec!["comment".into()], + token_modifiers: vec!["documentation".into()], + }, + full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }), + ..Default::default() + }, + ), + ), + ..Default::default() + }, + cx, + ) + .await; + + let mut full_request = cx + .set_request_handler::( + move |_, _, _| async move { + Ok(Some(lsp::SemanticTokensResult::Tokens( + lsp::SemanticTokens { + data: vec![ + 0, 0, 5, 0, 1, // comment [documentation] + 1, 0, 5, 0, 0, // normal comment + ], + result_id: None, + }, + ))) + }, + ); + + cx.set_state("ˇ/// d\n// n\n"); + full_request.next().await; + cx.run_until_parked(); + assert_eq!( + extract_semantic_highlights(&cx.editor, &cx).len(), + 2, + "Both documentation and normal comments should be highlighted initially" + ); + + // Apply a SPECIFIC empty rule for documentation only + cx.update(|_, cx| { + SettingsStore::update_global(cx, |store, cx| { + store.update_user_settings(cx, |settings| { + settings.global_lsp_settings = Some(GlobalLspSettingsContent { + semantic_token_rules: Some(SemanticTokenRules { + rules: vec![SemanticTokenRule { + token_type: Some("comment".to_string()), + token_modifiers: vec!["documentation".to_string()], + ..Default::default() + }], + }), + ..Default::default() + }); + }); + }); + }); + + cx.set_state("ˇ/// d\n// n\n"); + full_request.next().await; + cx.run_until_parked(); + + assert_eq!( + extract_semantic_highlights(&cx.editor, &cx).len(), + 1, + "Normal comment should still be highlighted (matched by default rule)" + ); + } + fn 
extract_semantic_highlight_styles( editor: &Entity, cx: &TestAppContext, diff --git a/crates/settings_content/src/project.rs b/crates/settings_content/src/project.rs index 789f3786cb0d39444370d78e92d3d342773cafd5..6e8b296ef21efa838833038582de82b3ebc4f28b 100644 --- a/crates/settings_content/src/project.rs +++ b/crates/settings_content/src/project.rs @@ -276,6 +276,18 @@ pub struct SemanticTokenRule { pub font_style: Option, } +impl SemanticTokenRule { + pub fn no_style_defined(&self) -> bool { + self.style.is_empty() + && self.foreground_color.is_none() + && self.background_color.is_none() + && self.underline.is_none() + && self.strikethrough.is_none() + && self.font_weight.is_none() + && self.font_style.is_none() + } +} + #[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)] #[serde(untagged)] pub enum SemanticTokenColorOverride {