extension_host: Allow extensions to define semantic highlighting rules (#49282)

Lukas Wirth created

Allows extensions to define semantic highlighting rules for their given language via a `semantic_token_rules.json` file placed in the language's directory.

Release Notes:

- Added support for language extensions to ship default semantic token rules via a `semantic_token_rules.json` file in their language directories.

Change summary

assets/settings/default_semantic_token_rules.json |  4 +-
crates/extension_host/src/extension_host.rs       | 42 +++++++++++
crates/languages/src/lib.rs                       |  4 +-
crates/project/src/lsp_store.rs                   |  4 +
crates/project/src/lsp_store/semantic_tokens.rs   | 16 ++++
crates/settings/src/settings.rs                   |  6 +-
crates/settings/src/settings_store.rs             | 56 ++++++++++------
docs/src/extensions/languages.md                  | 42 ++++++++++++
docs/src/semantic-tokens.md                       |  4 +-
9 files changed, 142 insertions(+), 36 deletions(-)

Detailed changes

assets/settings/default_semantic_token_rules.json 🔗

@@ -2,7 +2,9 @@
 //
 // These rules map LSP semantic token types to syntax theme styles.
 // To customize, add rules to "semantic_token_rules" in your settings.json.
-// User-defined rules are prepended to these defaults and take precedence.
+// User-defined rules are prepended and take highest precedence.
+// Extension language rules are applied next.
+// These built-in defaults are applied last.
 //
 // Each rule has the following properties:
 // - `token_type`: The LSP semantic token type to match. If omitted, matches all types.
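
For reference, a rule of the shape this header comment describes looks like the following (a single illustrative entry; the full property list appears in the documentation changes below):

```json
{
  "token_type": "comment",
  "style": ["comment"]
}
```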

crates/extension_host/src/extension_host.rs 🔗

@@ -32,8 +32,8 @@ use futures::{
     select_biased,
 };
 use gpui::{
-    App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, WeakEntity,
-    actions,
+    App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, Task, UpdateGlobal as _,
+    WeakEntity, actions,
 };
 use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
 use language::{
@@ -46,7 +46,7 @@ use release_channel::ReleaseChannel;
 use remote::RemoteClient;
 use semver::Version;
 use serde::{Deserialize, Serialize};
-use settings::Settings;
+use settings::{SemanticTokenRules, Settings, SettingsStore};
 use std::ops::RangeInclusive;
 use std::str::FromStr;
 use std::{
@@ -1220,6 +1220,15 @@ impl ExtensionStore {
         self.proxy
             .remove_languages(&languages_to_remove, &grammars_to_remove);
 
+        // Remove semantic token rules for languages being unloaded.
+        if !languages_to_remove.is_empty() {
+            SettingsStore::update_global(cx, |store, cx| {
+                for language in &languages_to_remove {
+                    store.remove_language_semantic_token_rules(language.as_ref(), cx);
+                }
+            });
+        }
+
         let mut grammars_to_add = Vec::new();
         let mut themes_to_add = Vec::new();
         let mut icon_themes_to_add = Vec::new();
@@ -1267,12 +1276,30 @@ impl ExtensionStore {
             .iter()
             .filter(|(_, entry)| extensions_to_load.contains(&entry.extension))
             .collect::<Vec<_>>();
+        let mut semantic_token_rules_to_add: Vec<(LanguageName, SemanticTokenRules)> = Vec::new();
         for (language_name, language) in languages_to_add {
             let mut language_path = self.installed_dir.clone();
             language_path.extend([
                 Path::new(language.extension.as_ref()),
                 language.path.as_path(),
             ]);
+
+            // Load semantic token rules if present in the language directory.
+            let rules_path = language_path.join("semantic_token_rules.json");
+            if let Ok(rules_json) = std::fs::read_to_string(&rules_path) {
+                match serde_json_lenient::from_str::<SemanticTokenRules>(&rules_json) {
+                    Ok(rules) => {
+                        semantic_token_rules_to_add.push((language_name.clone(), rules));
+                    }
+                    Err(err) => {
+                        log::error!(
+                            "Failed to parse semantic token rules from {}: {err:#}",
+                            rules_path.display()
+                        );
+                    }
+                }
+            }
+
             self.proxy.register_language(
                 language_name.clone(),
                 language.grammar.clone(),
@@ -1302,6 +1329,15 @@ impl ExtensionStore {
             );
         }
 
+        // Register semantic token rules for newly loaded extension languages.
+        if !semantic_token_rules_to_add.is_empty() {
+            SettingsStore::update_global(cx, |store, cx| {
+                for (language_name, rules) in semantic_token_rules_to_add {
+                    store.set_language_semantic_token_rules(language_name.0.clone(), rules, cx);
+                }
+            });
+        }
+
         let fs = self.fs.clone();
         let wasm_host = self.wasm_host.clone();
         let root_dir = self.installed_dir.clone();
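
The rule file is parsed with `serde_json_lenient`, the same lenient parser Zed uses elsewhere, so comments should be tolerated. A quick sketch of the expected format as a test, assuming `SemanticTokenRules` deserializes from a bare JSON array of rule objects (the documentation change below describes this format):

```rust
#[test]
fn parses_extension_rule_file() {
    // Lenient JSON: comments are allowed, as in Zed's settings files.
    let rules_json = r#"
        // Default rules shipped by the extension.
        [
            { "token_type": "lifetime", "style": ["lifetime"] }
        ]
    "#;
    let rules: settings::SemanticTokenRules =
        serde_json_lenient::from_str(rules_json).expect("rule file should parse");
    assert_eq!(rules.rules.len(), 1);
}
```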

crates/languages/src/lib.rs 🔗

@@ -368,8 +368,8 @@ fn register_language(
 ) {
     let config = load_config(name);
     if let Some(rules) = &semantic_token_rules {
-        SettingsStore::update_global(cx, |store, _| {
-            store.set_language_semantic_token_rules(config.name.0.clone(), rules.clone());
+        SettingsStore::update_global(cx, |store, cx| {
+            store.set_language_semantic_token_rules(config.name.0.clone(), rules.clone(), cx);
         });
     }
     for adapter in adapters {

crates/project/src/lsp_store.rs 🔗

@@ -5106,6 +5106,10 @@ impl LspStore {
             .clone();
         self.semantic_token_config
             .update_rules(new_semantic_token_rules);
+        // Always clear cached stylizers so that changes to language-specific
+        // semantic token rules (e.g. from extension install/uninstall) are
+        // picked up. Stylizers are recreated lazily, so this is cheap.
+        self.semantic_token_config.clear_stylizers();
 
         let new_global_semantic_tokens_mode =
             all_language_settings(None, cx).defaults.semantic_tokens;
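
Clearing instead of rebuilding is safe because stylizers are constructed on demand. A minimal sketch of that lazy-cache pattern, using simplified stand-in types (`Stylizer` and the `String` key here are hypothetical):

```rust
use std::collections::HashMap;

struct Stylizer;

#[derive(Default)]
struct SemanticTokenCache {
    stylizers: HashMap<String, Stylizer>,
}

impl SemanticTokenCache {
    /// Cheap: drops the cached entries without rebuilding anything.
    fn clear_stylizers(&mut self) {
        self.stylizers.clear();
    }

    /// Entries are recreated lazily on first use after invalidation,
    /// picking up whatever rules are current at that point.
    fn stylizer_for(&mut self, language: &str) -> &Stylizer {
        self.stylizers
            .entry(language.to_owned())
            .or_insert_with(|| Stylizer)
    }
}
```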

crates/project/src/lsp_store/semantic_tokens.rs 🔗

@@ -12,8 +12,11 @@ use gpui::{App, AppContext, AsyncApp, Context, Entity, ReadGlobal as _, SharedSt
 use language::{Buffer, LanguageName, language_settings::all_language_settings};
 use lsp::{AdapterServerCapabilities, LanguageServerId};
 use rpc::{TypedEnvelope, proto};
-use settings::{SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore};
+use settings::{
+    DefaultSemanticTokenRules, SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore,
+};
 use smol::future::yield_now;
+
 use text::{Anchor, Bias, OffsetUtf16, PointUtf16, Unclipped};
 use util::ResultExt as _;
 
@@ -58,6 +61,15 @@ impl SemanticTokenConfig {
         }
     }
 
+    /// Clears all cached stylizers.
+    ///
+    /// This is called when settings change to ensure that any modifications to
+    /// language-specific semantic token rules (e.g. from extension install/uninstall)
+    /// are picked up. Stylizers are recreated lazily on next use.
+    pub(super) fn clear_stylizers(&mut self) {
+        self.stylizers.clear();
+    }
+
     pub(super) fn update_global_mode(&mut self, new_mode: settings::SemanticTokens) -> bool {
         if new_mode != self.global_mode {
             self.global_mode = new_mode;
@@ -462,6 +474,7 @@ impl SemanticTokenStylizer {
         let global_rules = &ProjectSettings::get_global(cx)
             .global_lsp_settings
             .semantic_token_rules;
+        let default_rules = cx.global::<DefaultSemanticTokenRules>();
 
         let rules_by_token_type = token_types
             .iter()
@@ -475,6 +488,7 @@ impl SemanticTokenStylizer {
                     .rules
                     .iter()
                     .chain(language_rules.into_iter().flat_map(|lr| &lr.rules))
+                    .chain(default_rules.0.rules.iter())
                     .rev()
                     .filter(filter)
                     .cloned()
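
The `.rev()` above is what realizes the documented priority order: sources are chained highest-precedence first (user settings, then extension language rules, then defaults) and then reversed, presumably so that when the collected matches are applied in sequence, higher-precedence rules land last and their styles win. A self-contained sketch with strings standing in for rules:

```rust
fn main() {
    // Sources in documented priority order: user settings first, then
    // extension (language) rules, then built-in defaults.
    let user = vec!["user-1", "user-2"];
    let extension = vec!["ext-1"];
    let defaults = vec!["default-1"];

    // Chain highest precedence first, then reverse, mirroring the
    // iterator pipeline in the diff above.
    let application_order: Vec<&str> = user
        .iter()
        .copied()
        .chain(extension.iter().copied())
        .chain(defaults.iter().copied())
        .rev()
        .collect();

    // Lowest precedence comes first; the user's earliest rule is last.
    assert_eq!(application_order, ["default-1", "ext-1", "user-2", "user-1"]);
}
```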

crates/settings/src/settings.rs 🔗

@@ -44,9 +44,9 @@ pub use keymap_file::{
 pub use settings_file::*;
 pub use settings_json::*;
 pub use settings_store::{
-    InvalidSettingsError, LSP_SETTINGS_SCHEMA_URL_PREFIX, LocalSettingsKind, LocalSettingsPath,
-    MigrationStatus, Settings, SettingsFile, SettingsJsonSchemaParams, SettingsKey,
-    SettingsLocation, SettingsParseResult, SettingsStore,
+    DefaultSemanticTokenRules, InvalidSettingsError, LSP_SETTINGS_SCHEMA_URL_PREFIX,
+    LocalSettingsKind, LocalSettingsPath, MigrationStatus, Settings, SettingsFile,
+    SettingsJsonSchemaParams, SettingsKey, SettingsLocation, SettingsParseResult, SettingsStore,
 };
 
 pub use vscode_import::{VsCodeSettings, VsCodeSettingsSource};

crates/settings/src/settings_store.rs 🔗

@@ -241,6 +241,11 @@ impl LocalSettingsPath {
 
 impl Global for SettingsStore {}
 
+#[derive(Default)]
+pub struct DefaultSemanticTokenRules(pub SemanticTokenRules);
+
+impl gpui::Global for DefaultSemanticTokenRules {}
+
 #[doc(hidden)]
 #[derive(Debug)]
 pub struct SettingValue<T> {
@@ -275,29 +280,22 @@ pub struct SettingsJsonSchemaParams<'a> {
 
 impl SettingsStore {
     pub fn new(cx: &mut App, default_settings: &str) -> Self {
-        Self::new_with_semantic_tokens(cx, default_settings, &crate::default_semantic_token_rules())
+        Self::new_with_semantic_tokens(cx, default_settings)
     }
 
-    pub fn new_with_semantic_tokens(
-        cx: &mut App,
-        default_settings: &str,
-        default_semantic_tokens: &str,
-    ) -> Self {
+    pub fn new_with_semantic_tokens(cx: &mut App, default_settings: &str) -> Self {
         let (setting_file_updates_tx, mut setting_file_updates_rx) = mpsc::unbounded();
-        let mut default_settings: SettingsContent =
+        let default_settings: SettingsContent =
             SettingsContent::parse_json_with_comments(default_settings).unwrap();
-        if let Ok(semantic_token_rules) =
-            crate::parse_json_with_comments::<SemanticTokenRules>(default_semantic_tokens)
-        {
-            let global_lsp = default_settings
-                .global_lsp_settings
-                .get_or_insert_with(Default::default);
-            let existing_rules = global_lsp
-                .semantic_token_rules
-                .get_or_insert_with(Default::default);
-            existing_rules.rules.extend(semantic_token_rules.rules);
+        if !cx.has_global::<DefaultSemanticTokenRules>() {
+            cx.set_global::<DefaultSemanticTokenRules>(
+                crate::parse_json_with_comments::<SemanticTokenRules>(
+                    &crate::default_semantic_token_rules(),
+                )
+                .map(DefaultSemanticTokenRules)
+                .unwrap_or_default(),
+            );
         }
-
         let default_settings: Rc<SettingsContent> = default_settings.into();
         let mut this = Self {
             setting_values: Default::default(),
@@ -868,18 +866,30 @@ impl SettingsStore {
     /// Sets language-specific semantic token rules.
     ///
     /// These rules are registered by language modules (e.g. the Rust language module)
-    /// and are stored separately from the global rules. They are only applied to
-    /// buffers of the matching language by the `SemanticTokenStylizer`.
+    /// or by third-party extensions (via `semantic_token_rules.json` in their language
+    /// directories). They are stored separately from the global rules and are only
+    /// applied to buffers of the matching language by the `SemanticTokenStylizer`.
     ///
-    /// These should be registered before any `SemanticTokenStylizer` instances are
-    /// created (typically during `languages::init`), as existing cached stylizers
-    /// are not automatically invalidated.
+    /// This triggers a settings recomputation so that observers (e.g. `LspStore`)
+    /// are notified and can invalidate cached stylizers.
     pub fn set_language_semantic_token_rules(
         &mut self,
         language: SharedString,
         rules: SemanticTokenRules,
+        cx: &mut App,
     ) {
         self.language_semantic_token_rules.insert(language, rules);
+        self.recompute_values(None, cx);
+    }
+
+    /// Removes language-specific semantic token rules for the given language.
+    ///
+    /// This should be called when an extension that registered rules for a language
+    /// is unloaded. Triggers a settings recomputation so that observers (e.g.
+    /// `LspStore`) are notified and can invalidate cached stylizers.
+    pub fn remove_language_semantic_token_rules(&mut self, language: &str, cx: &mut App) {
+        self.language_semantic_token_rules.remove(language);
+        self.recompute_values(None, cx);
     }
 
     /// Returns the language-specific semantic token rules for the given language,
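
Together these form a symmetric lifecycle API. A hypothetical helper pair mirroring the `extension_host` call sites above (the helper names are illustrative, not part of this PR):

```rust
use gpui::{App, SharedString, UpdateGlobal as _};
use settings::{SemanticTokenRules, SettingsStore};

/// On extension load. The recomputation this triggers is what notifies
/// observers like LspStore, which then clear their cached stylizers.
fn register_rules(language: SharedString, rules: SemanticTokenRules, cx: &mut App) {
    SettingsStore::update_global(cx, |store, cx| {
        store.set_language_semantic_token_rules(language, rules, cx);
    });
}

/// On extension unload.
fn unregister_rules(language: &str, cx: &mut App) {
    SettingsStore::update_global(cx, |store, cx| {
        store.remove_language_semantic_token_rules(language, cx);
    });
}
```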

docs/src/extensions/languages.md 🔗

@@ -434,6 +434,40 @@ The `semantic_tokens` setting accepts the following values:
 - `"combined"`: Use LSP semantic tokens together with tree-sitter highlighting.
 - `"full"`: Use LSP semantic tokens exclusively, replacing tree-sitter highlighting.
 
+#### Extension-Provided Semantic Token Rules
+
+Language extensions can ship default semantic token rules for their language server's custom token types. To do this, place a `semantic_token_rules.json` file in the language directory alongside `config.toml`:
+
+```
+my-extension/
+  languages/
+    my-language/
+      config.toml
+      highlights.scm
+      semantic_token_rules.json
+```
+
+The file uses the same format as the `semantic_token_rules` array in user settings — a JSON array of rule objects:
+
+```json
+[
+  {
+    "token_type": "lifetime",
+    "style": ["lifetime"]
+  },
+  {
+    "token_type": "builtinType",
+    "style": ["type"]
+  },
+  {
+    "token_type": "selfKeyword",
+    "style": ["variable.special"]
+  }
+]
+```
+
+This is useful when a language server reports custom (non-standard) semantic token types that aren't covered by Zed's built-in default rules. Extension-provided rules act as sensible defaults for that language — users can always override them via `semantic_token_rules` in their settings file, and built-in default rules are only used when neither user nor extension rules match.
+
 #### Customizing Semantic Token Styles
 
 Zed supports customizing the styles used for semantic tokens. You can define rules in your settings file, which customize how semantic tokens get mapped to styles in your theme.
@@ -463,7 +497,13 @@ Zed supports customizing the styles used for semantic tokens. You can define rul
 }
 ```
 
-All rules that match a given `token_type` and `token_modifiers` are applied. Earlier rules take precedence. If no rules match, the token is not highlighted. User-defined rules take priority over the default rules.
+All rules that match a given `token_type` and `token_modifiers` are applied. Earlier rules take precedence. If no rules match, the token is not highlighted.
+
+Rules are applied in the following priority order (highest to lowest):
+
+1. **User settings** — rules from `semantic_token_rules` in your settings file.
+2. **Extension rules** — rules from `semantic_token_rules.json` in extension language directories.
+3. **Default rules** — Zed's built-in rules for standard LSP token types.
 
 Each rule in the `semantic_token_rules` array is defined as follows:
 
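As a worked illustration of the priority order documented above, here is a hypothetical user settings fragment (nested under `global_lsp_settings`, as described in `docs/src/semantic-tokens.md` below) that overrides the extension's `lifetime` rule from the earlier example:

```json
{
  "global_lsp_settings": {
    "semantic_token_rules": [
      {
        "token_type": "lifetime",
        "style": ["variable.special"]
      }
    ]
  }
}
```

Because user rules take highest precedence and the first match wins, `lifetime` tokens now use the `variable.special` style even though the extension ships its own `lifetime` rule.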

docs/src/semantic-tokens.md 🔗

@@ -48,7 +48,7 @@ You can configure this globally or per-language:
 Semantic tokens are styled using rules that map LSP token types and modifiers to theme styles or custom colors. Zed provides sensible defaults, but you can customize these in your settings.json: add rules under `global_lsp_settings.semantic_token_rules` key.
 
 Rules are matched in order, and the first matching rule wins.
-User-defined rules take precedence over defaults.
+User-defined rules take highest precedence, followed by extension-provided language rules, then Zed defaults.
 
 ### Rule Structure
 
@@ -139,7 +139,7 @@ To disable highlighting for a specific token type, add an empty rule that matche
 }
 ```
 
-Since user rules are prepended to defaults and the first match wins, this empty rule prevents any styling from being applied to comment tokens.
+Since user rules take highest precedence and the first match wins, this empty rule prevents any styling from being applied to comment tokens.
 
 ## Default Rules