Follow-up semantic token fixes (#48485)

Kirill Bulatov created

Follow-up of https://github.com/zed-industries/zed/pull/46356

* better selection styling in the highlights panel, so the active
item's color remains visible
* remove the `DiffbaselessAnchor` entity, which caused flickering and
highlighting issues
* react to theme and theme settings changes and refresh semantic tokens
* avoid eager flickering when refreshing the tokens
* do not show semantic tokens in the editor menu for non-applicable
buffers

Release Notes:

- N/A

Change summary

crates/collab/tests/integration/editor_tests.rs    |   2 
crates/editor/src/display_map.rs                   |   7 
crates/editor/src/display_map/custom_highlights.rs |   8 
crates/editor/src/editor.rs                        |  75 +-
crates/editor/src/semantic_tokens.rs               | 440 +++++++++++++--
crates/language_tools/src/highlights_tree_view.rs  |  17 
crates/multi_buffer/src/anchor.rs                  | 227 --------
crates/multi_buffer/src/multi_buffer.rs            |  21 
crates/project/src/lsp_store.rs                    |  41 -
crates/project/src/lsp_store/semantic_tokens.rs    |  47 +
crates/project/src/project.rs                      |  26 
crates/zed/src/zed/quick_action_bar.rs             |  44 +
12 files changed, 550 insertions(+), 405 deletions(-)

Detailed changes

crates/collab/tests/integration/editor_tests.rs 🔗

@@ -24,7 +24,7 @@ use gpui::{
 use indoc::indoc;
 use language::{FakeLspAdapter, language_settings::language_settings, rust_lang};
 use lsp::LSP_REQUEST_TIMEOUT;
-use multi_buffer::DiffbaselessAnchorRangeExt as _;
+use multi_buffer::AnchorRangeExt as _;
 use pretty_assertions::assert_eq;
 use project::{
     ProgressToken, ProjectPath, SERVER_PROGRESS_THROTTLE_TIMEOUT,

crates/editor/src/display_map.rs 🔗

@@ -99,9 +99,8 @@ use gpui::{
 };
 use language::{Point, Subscription as BufferSubscription, language_settings::language_settings};
 use multi_buffer::{
-    Anchor, AnchorRangeExt, DiffbaselessAnchor, ExcerptId, MultiBuffer, MultiBufferOffset,
-    MultiBufferOffsetUtf16, MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, RowInfo,
-    ToOffset, ToPoint,
+    Anchor, AnchorRangeExt, ExcerptId, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16,
+    MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint,
 };
 use project::project_settings::DiagnosticSeverity;
 use project::{InlayId, lsp_store::TokenType};
@@ -373,7 +372,7 @@ pub struct HighlightStyleId(u32);
 /// A `SemanticToken`, but positioned to an offset in a buffer, and stylized.
 #[derive(Debug, Clone)]
 pub struct SemanticTokenHighlight {
-    pub range: Range<DiffbaselessAnchor>,
+    pub range: Range<Anchor>,
     pub style: HighlightStyleId,
     pub token_type: TokenType,
     pub token_modifiers: u32,

crates/editor/src/display_map/custom_highlights.rs 🔗

@@ -116,12 +116,8 @@ fn create_highlight_endpoints(
         }
     }
     if let Some(semantic_token_highlights) = semantic_token_highlights {
-        let Ok(start) = buffer.anchor_after(range.start).try_into() else {
-            return highlight_endpoints.into_iter().peekable();
-        };
-        let Ok(end) = buffer.anchor_after(range.end).try_into() else {
-            return highlight_endpoints.into_iter().peekable();
-        };
+        let start = buffer.anchor_after(range.start);
+        let end = buffer.anchor_after(range.end);
         for buffer_id in buffer.buffer_ids_for_range(range.clone()) {
             let Some((semantic_token_highlights, interner)) =
                 semantic_token_highlights.get(&buffer_id)

crates/editor/src/editor.rs 🔗

@@ -72,9 +72,9 @@ pub use items::MAX_TAB_TITLE_LEN;
 pub use lsp::CompletionContext;
 pub use lsp_ext::lsp_tasks;
 pub use multi_buffer::{
-    Anchor, AnchorRangeExt, BufferOffset, DiffbaselessAnchor, DiffbaselessAnchorRangeExt,
-    ExcerptId, ExcerptRange, MBTextSummary, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16,
-    MultiBufferSnapshot, PathKey, RowInfo, ToOffset, ToPoint,
+    Anchor, AnchorRangeExt, BufferOffset, ExcerptId, ExcerptRange, MBTextSummary, MultiBuffer,
+    MultiBufferOffset, MultiBufferOffsetUtf16, MultiBufferSnapshot, PathKey, RowInfo, ToOffset,
+    ToPoint,
 };
 pub use split::{SplitDiffFeatureFlag, SplittableEditor, ToggleLockedCursors, ToggleSplitDiff};
 pub use split_editor_view::SplitEditorView;
@@ -176,8 +176,8 @@ use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, SharedScrol
 use selections_collection::{MutableSelectionsCollection, SelectionsCollection};
 use serde::{Deserialize, Serialize};
 use settings::{
-    GitGutterSetting, RelativeLineNumbers, SemanticTokenRules, Settings, SettingsLocation,
-    SettingsStore, update_settings_file,
+    GitGutterSetting, RelativeLineNumbers, Settings, SettingsLocation, SettingsStore,
+    update_settings_file,
 };
 use smallvec::{SmallVec, smallvec};
 use snippet::Snippet;
@@ -228,6 +228,7 @@ use crate::{
     },
     scroll::{ScrollOffset, ScrollPixelOffset},
     selections_collection::resolve_selections_wrapping_blocks,
+    semantic_tokens::SemanticTokenState,
     signature_help::{SignatureHelpHiddenBy, SignatureHelpState},
 };
 
@@ -1336,10 +1337,7 @@ pub struct Editor {
     applicable_language_settings: HashMap<Option<LanguageName>, LanguageSettings>,
     accent_data: Option<AccentData>,
     fetched_tree_sitter_chunks: HashMap<ExcerptId, HashSet<Range<BufferRow>>>,
-    semantic_token_rules: SemanticTokenRules,
-    semantic_tokens_enabled: bool,
-    update_semantic_tokens_task: Task<()>,
-    semantic_tokens_fetched_for_buffers: HashMap<BufferId, clock::Global>,
+    semantic_token_state: SemanticTokenState,
     pub(crate) refresh_matching_bracket_highlights_task: Task<()>,
     refresh_outline_symbols_task: Task<()>,
     outline_symbols: Option<(BufferId, Vec<OutlineItem<Anchor>>)>,
@@ -2585,15 +2583,9 @@ impl Editor {
             on_local_selections_changed: None,
             suppress_selection_callback: false,
             applicable_language_settings: HashMap::default(),
-            semantic_token_rules: ProjectSettings::get_global(cx)
-                .global_lsp_settings
-                .semantic_token_rules
-                .clone(),
+            semantic_token_state: SemanticTokenState::new(cx, full_mode),
             accent_data: None,
             fetched_tree_sitter_chunks: HashMap::default(),
-            semantic_tokens_enabled: full_mode,
-            update_semantic_tokens_task: Task::ready(()),
-            semantic_tokens_fetched_for_buffers: HashMap::default(),
             number_deleted_lines: false,
             refresh_matching_bracket_highlights_task: Task::ready(()),
             refresh_outline_symbols_task: Task::ready(()),
@@ -3141,7 +3133,7 @@ impl Editor {
             show_line_numbers: self.show_line_numbers,
             number_deleted_lines: self.number_deleted_lines,
             show_git_diff_gutter: self.show_git_diff_gutter,
-            semantic_tokens_enabled: self.semantic_tokens_enabled,
+            semantic_tokens_enabled: self.semantic_token_state.enabled(),
             show_code_actions: self.show_code_actions,
             show_runnables: self.show_runnables,
             show_breakpoints: self.show_breakpoints,
@@ -23911,8 +23903,8 @@ impl Editor {
                     )
                     .detach();
                 }
-                self.semantic_tokens_fetched_for_buffers
-                    .remove(&buffer.read(cx).remote_id());
+                self.semantic_token_state
+                    .invalidate_buffer(&buffer.read(cx).remote_id());
                 self.update_lsp_data(Some(buffer_id), window, cx);
                 self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
                 self.colorize_brackets(false, cx);
@@ -23935,7 +23927,7 @@ impl Editor {
                     self.registered_buffers.remove(buffer_id);
                     self.tasks
                         .retain(|(task_buffer_id, _), _| task_buffer_id != buffer_id);
-                    self.semantic_tokens_fetched_for_buffers.remove(buffer_id);
+                    self.semantic_token_state.invalidate_buffer(buffer_id);
                     self.display_map.update(cx, |display_map, _| {
                         display_map.invalidate_semantic_highlights(*buffer_id);
                     });
@@ -23964,8 +23956,8 @@ impl Editor {
                 for id in ids {
                     self.fetched_tree_sitter_chunks.remove(id);
                     if let Some(buffer) = snapshot.buffer_for_excerpt(*id) {
-                        self.semantic_tokens_fetched_for_buffers
-                            .remove(&buffer.remote_id());
+                        self.semantic_token_state
+                            .invalidate_buffer(&buffer.remote_id());
                     }
                 }
                 self.colorize_brackets(false, cx);
@@ -24141,21 +24133,12 @@ impl Editor {
             cx.emit(EditorEvent::BreadcrumbsChanged);
         }
 
-        let (
-            restore_unsaved_buffers,
-            show_inline_diagnostics,
-            inline_blame_enabled,
-            new_semantic_token_rules,
-        ) = {
+        let (restore_unsaved_buffers, show_inline_diagnostics, inline_blame_enabled) = {
             let project_settings = ProjectSettings::get_global(cx);
             (
                 project_settings.session.restore_unsaved_buffers,
                 project_settings.diagnostics.inline.enabled,
                 project_settings.git.inline_blame.enabled,
-                project_settings
-                    .global_lsp_settings
-                    .semantic_token_rules
-                    .clone(),
             )
         };
         self.buffer_serialization = self
@@ -24211,13 +24194,15 @@ impl Editor {
                 cx,
             );
 
-            if new_semantic_token_rules != self.semantic_token_rules {
-                self.semantic_token_rules = new_semantic_token_rules;
-                self.semantic_tokens_fetched_for_buffers.clear();
-                self.display_map.update(cx, |display_map, _| {
-                    display_map.semantic_token_highlights.clear();
-                });
-                self.update_semantic_tokens(None, None, cx);
+            let new_semantic_token_rules = ProjectSettings::get_global(cx)
+                .global_lsp_settings
+                .semantic_token_rules
+                .clone();
+            if self
+                .semantic_token_state
+                .update_rules(new_semantic_token_rules)
+            {
+                self.refresh_semantic_token_highlights(cx);
             }
         }
 
@@ -24234,6 +24219,8 @@ impl Editor {
             self.accent_data = new_accents;
             self.colorize_brackets(true, cx);
         }
+
+        self.refresh_semantic_token_highlights(cx);
     }
 
     pub fn set_searchable(&mut self, searchable: bool) {
@@ -26418,6 +26405,8 @@ pub trait SemanticsProvider {
 
     fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool;
 
+    fn supports_semantic_tokens(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool;
+
     fn document_highlights(
         &self,
         buffer: &Entity<Buffer>,
@@ -26945,6 +26934,14 @@ impl SemanticsProvider for Entity<Project> {
         })
     }
 
+    fn supports_semantic_tokens(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool {
+        self.update(cx, |project, cx| {
+            buffer.update(cx, |buffer, cx| {
+                project.any_language_server_supports_semantic_tokens(buffer, cx)
+            })
+        })
+    }
+
     fn inline_values(
         &self,
         buffer_handle: Entity<Buffer>,

crates/editor/src/semantic_tokens.rs 🔗

@@ -1,16 +1,23 @@
 use std::{collections::hash_map, sync::Arc, time::Duration};
 
-use collections::HashSet;
+use collections::{HashMap, HashSet};
 use futures::future::join_all;
 use gpui::{
-    Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
+    App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
 };
 use itertools::Itertools as _;
 use language::language_settings::language_settings;
-use project::lsp_store::{
-    BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer, TokenType,
+use project::{
+    lsp_store::{
+        BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
+        TokenType,
+    },
+    project_settings::ProjectSettings,
+};
+use settings::{
+    SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
+    SemanticTokenRules, Settings as _,
 };
-use settings::{SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight};
 use text::BufferId;
 use theme::SyntaxTheme;
 use ui::ActiveTheme as _;
@@ -21,9 +28,71 @@ use crate::{
     display_map::{HighlightStyleInterner, SemanticTokenHighlight},
 };
 
+pub(super) struct SemanticTokenState {
+    rules: SemanticTokenRules,
+    enabled: bool,
+    update_task: Task<()>,
+    fetched_for_buffers: HashMap<BufferId, clock::Global>,
+}
+
+impl SemanticTokenState {
+    pub(super) fn new(cx: &App, enabled: bool) -> Self {
+        Self {
+            rules: ProjectSettings::get_global(cx)
+                .global_lsp_settings
+                .semantic_token_rules
+                .clone(),
+            enabled,
+            update_task: Task::ready(()),
+            fetched_for_buffers: HashMap::default(),
+        }
+    }
+
+    pub(super) fn enabled(&self) -> bool {
+        self.enabled
+    }
+
+    pub(super) fn toggle_enabled(&mut self) {
+        self.enabled = !self.enabled;
+    }
+
+    #[cfg(test)]
+    pub(super) fn take_update_task(&mut self) -> Task<()> {
+        std::mem::replace(&mut self.update_task, Task::ready(()))
+    }
+
+    pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
+        self.fetched_for_buffers.remove(buffer_id);
+    }
+
+    pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
+        if new_rules != self.rules {
+            self.rules = new_rules;
+            true
+        } else {
+            false
+        }
+    }
+}
+
 impl Editor {
+    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
+        let Some(provider) = self.semantics_provider.as_ref() else {
+            return false;
+        };
+
+        let mut supports = false;
+        self.buffer().update(cx, |this, cx| {
+            this.for_each_buffer(|buffer| {
+                supports |= provider.supports_semantic_tokens(buffer, cx);
+            });
+        });
+
+        supports
+    }
+
     pub fn semantic_highlights_enabled(&self) -> bool {
-        self.semantic_tokens_enabled
+        self.semantic_token_state.enabled()
     }
 
     pub fn toggle_semantic_highlights(
@@ -32,7 +101,7 @@ impl Editor {
         _window: &mut gpui::Window,
         cx: &mut Context<Self>,
     ) {
-        self.semantic_tokens_enabled = !self.semantic_tokens_enabled;
+        self.semantic_token_state.toggle_enabled();
         self.update_semantic_tokens(None, None, cx);
     }
 
@@ -42,12 +111,12 @@ impl Editor {
         for_server: Option<RefreshForServer>,
         cx: &mut Context<Self>,
     ) {
-        if !self.mode().is_full() || !self.semantic_tokens_enabled {
-            self.semantic_tokens_fetched_for_buffers.clear();
+        if !self.mode().is_full() || !self.semantic_token_state.enabled() {
+            self.semantic_token_state.fetched_for_buffers.clear();
             self.display_map.update(cx, |display_map, _| {
                 display_map.semantic_token_highlights.clear();
             });
-            self.update_semantic_tokens_task = Task::ready(());
+            self.semantic_token_state.update_task = Task::ready(());
             cx.notify();
             return;
         }
@@ -55,7 +124,8 @@ impl Editor {
         let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
         if for_server.is_some() {
             invalidate_semantic_highlights_for_buffers.extend(
-                self.semantic_tokens_fetched_for_buffers
+                self.semantic_token_state
+                    .fetched_for_buffers
                     .drain()
                     .map(|(buffer_id, _)| buffer_id),
             );
@@ -90,7 +160,7 @@ impl Editor {
             .unique_by(|(buffer_id, _)| *buffer_id)
             .collect::<Vec<_>>();
 
-        self.update_semantic_tokens_task = cx.spawn(async move |editor, cx| {
+        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
             cx.background_executor()
                 .timer(Duration::from_millis(50))
                 .await;
@@ -100,7 +170,7 @@ impl Editor {
                         .into_iter()
                         .filter_map(|(buffer_id, buffer)| {
                             let known_version =
-                                editor.semantic_tokens_fetched_for_buffers.get(&buffer_id);
+                                editor.semantic_token_state.fetched_for_buffers.get(&buffer_id);
                             let query_version = buffer.read(cx).version();
                             if known_version.is_some_and(|known_version| {
                                 !query_version.changed_since(known_version)
@@ -147,7 +217,7 @@ impl Editor {
                         },
                     };
 
-                    match editor.semantic_tokens_fetched_for_buffers.entry(buffer_id) {
+                    match editor.semantic_token_state.fetched_for_buffers.entry(buffer_id) {
                         hash_map::Entry::Occupied(mut o) => {
                             if query_version.changed_since(o.get()) {
                                 o.insert(query_version);
@@ -203,6 +273,11 @@ impl Editor {
             }).ok();
         });
     }
+
+    pub(super) fn refresh_semantic_token_highlights(&mut self, cx: &mut Context<Self>) {
+        self.semantic_token_state.fetched_for_buffers.clear();
+        self.update_semantic_tokens(None, None, cx);
+    }
 }
 
 fn buffer_into_editor_highlights<'a, 'b>(
@@ -211,21 +286,15 @@ fn buffer_into_editor_highlights<'a, 'b>(
     all_excerpts: &'a [multi_buffer::ExcerptId],
     multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
     interner: &'b mut HighlightStyleInterner,
-    cx: &'a gpui::App,
+    cx: &'a App,
 ) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
     buffer_tokens.iter().filter_map(|token| {
-        let multi_buffer_start = all_excerpts
-            .iter()
-            .find_map(|&excerpt_id| {
-                multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, token.range.start)
-            })
-            .and_then(|anchor| anchor.try_into().ok())?;
-        let multi_buffer_end = all_excerpts
-            .iter()
-            .find_map(|&excerpt_id| {
-                multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, token.range.end)
-            })
-            .and_then(|anchor| anchor.try_into().ok())?;
+        let multi_buffer_start = all_excerpts.iter().find_map(|&excerpt_id| {
+            multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, token.range.start)
+        })?;
+        let multi_buffer_end = all_excerpts.iter().find_map(|&excerpt_id| {
+            multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, token.range.end)
+        })?;
 
         let style = convert_token(
             stylizer,
@@ -352,8 +421,7 @@ mod tests {
     use language::{Language, LanguageConfig, LanguageMatcher};
     use languages::FakeLspAdapter;
     use multi_buffer::{
-        DiffbaselessAnchorRangeExt, ExcerptRange, ExpandExcerptDirection, MultiBuffer,
-        MultiBufferOffset,
+        AnchorRangeExt, ExcerptRange, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset,
     };
     use project::Project;
     use rope::Point;
@@ -508,17 +576,13 @@ mod tests {
         cx.set_state("ˇfn main() {}");
         assert!(full_request.next().await.is_some());
 
-        let task = cx.update_editor(|e, _, _| {
-            std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
-        });
+        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
         task.await;
 
         cx.set_state("ˇfn main() { a }");
         assert!(full_request.next().await.is_some());
 
-        let task = cx.update_editor(|e, _, _| {
-            std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
-        });
+        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
         task.await;
         assert_eq!(
             extract_semantic_highlights(&cx.editor, &cx),
@@ -606,16 +670,12 @@ mod tests {
         // Initial request, for the empty buffer.
         cx.set_state("ˇfn main() {}");
         assert!(full_request.next().await.is_some());
-        let task = cx.update_editor(|e, _, _| {
-            std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
-        });
+        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
         task.await;
 
         cx.set_state("ˇfn main() { a }");
         assert!(delta_request.next().await.is_some());
-        let task = cx.update_editor(|e, _, _| {
-            std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
-        });
+        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
         task.await;
 
         assert_eq!(
@@ -832,9 +892,7 @@ mod tests {
             editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
         });
         cx.executor().advance_clock(Duration::from_millis(200));
-        let task = editor.update_in(&mut cx, |e, _, _| {
-            std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
-        });
+        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
         cx.run_until_parked();
         task.await;
 
@@ -1094,9 +1152,7 @@ mod tests {
 
         // Initial request.
         cx.executor().advance_clock(Duration::from_millis(200));
-        let task = editor.update_in(&mut cx, |e, _, _| {
-            std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
-        });
+        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
         cx.run_until_parked();
         task.await;
         assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
@@ -1120,9 +1176,7 @@ mod tests {
 
         // Wait for semantic tokens to be re-fetched after expansion.
         cx.executor().advance_clock(Duration::from_millis(200));
-        let task = editor.update_in(&mut cx, |e, _, _| {
-            std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
-        });
+        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
         cx.run_until_parked();
         task.await;
 
@@ -1311,9 +1365,7 @@ mod tests {
 
         // Initial request.
         cx.executor().advance_clock(Duration::from_millis(200));
-        let task = editor.update_in(&mut cx, |e, _, _| {
-            std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
-        });
+        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
         cx.run_until_parked();
         task.await;
         assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
@@ -1327,9 +1379,7 @@ mod tests {
             editor.edit([(MultiBufferOffset(12)..MultiBufferOffset(13), "c")], cx);
         });
         cx.executor().advance_clock(Duration::from_millis(200));
-        let task = editor.update_in(&mut cx, |e, _, _| {
-            std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
-        });
+        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
         cx.run_until_parked();
         task.await;
         assert_eq!(
@@ -1478,6 +1528,284 @@ mod tests {
         );
     }
 
+    #[gpui::test]
+    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
+        use collections::IndexMap;
+        use gpui::{Hsla, Rgba, UpdateGlobal as _};
+        use theme::{HighlightStyleContent, ThemeStyleContent};
+
+        init_test(cx, |_| {});
+
+        update_test_language_settings(cx, |language_settings| {
+            language_settings.languages.0.insert(
+                "Rust".into(),
+                LanguageSettingsContent {
+                    semantic_tokens: Some(SemanticTokens::Full),
+                    ..LanguageSettingsContent::default()
+                },
+            );
+        });
+
+        let mut cx = EditorLspTestContext::new_rust(
+            lsp::ServerCapabilities {
+                semantic_tokens_provider: Some(
+                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+                        lsp::SemanticTokensOptions {
+                            legend: lsp::SemanticTokensLegend {
+                                token_types: Vec::from(["function".into()]),
+                                token_modifiers: Vec::new(),
+                            },
+                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+                            ..lsp::SemanticTokensOptions::default()
+                        },
+                    ),
+                ),
+                ..lsp::ServerCapabilities::default()
+            },
+            cx,
+        )
+        .await;
+
+        let mut full_request = cx
+            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+                move |_, _, _| async move {
+                    Ok(Some(lsp::SemanticTokensResult::Tokens(
+                        lsp::SemanticTokens {
+                            data: vec![
+                                0, // delta_line
+                                3, // delta_start
+                                4, // length
+                                0, // token_type (function)
+                                0, // token_modifiers_bitset
+                            ],
+                            result_id: None,
+                        },
+                    )))
+                },
+            );
+
+        cx.set_state("ˇfn main() {}");
+        full_request.next().await;
+        cx.run_until_parked();
+
+        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
+        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
+        let initial_color = initial_styles[0].color;
+
+        // Changing experimental_theme_overrides triggers GlobalTheme reload,
+        // which fires theme_changed → refresh_semantic_token_highlights.
+        let red_color: Hsla = Rgba {
+            r: 1.0,
+            g: 0.0,
+            b: 0.0,
+            a: 1.0,
+        }
+        .into();
+        cx.update(|_, cx| {
+            SettingsStore::update_global(cx, |store, cx| {
+                store.update_user_settings(cx, |settings| {
+                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
+                        syntax: IndexMap::from_iter([(
+                            "function".to_string(),
+                            HighlightStyleContent {
+                                color: Some("#ff0000".to_string()),
+                                background_color: None,
+                                font_style: None,
+                                font_weight: None,
+                            },
+                        )]),
+                        ..ThemeStyleContent::default()
+                    });
+                });
+            });
+        });
+
+        cx.executor().advance_clock(Duration::from_millis(200));
+        cx.run_until_parked();
+
+        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
+        assert_eq!(styles_after_override.len(), 1);
+        assert_eq!(
+            styles_after_override[0].color,
+            Some(red_color),
+            "Highlight should have red color from theme override"
+        );
+        assert_ne!(
+            styles_after_override[0].color, initial_color,
+            "Color should have changed from initial"
+        );
+
+        // Changing the override to a different color also restyles.
+        let blue_color: Hsla = Rgba {
+            r: 0.0,
+            g: 0.0,
+            b: 1.0,
+            a: 1.0,
+        }
+        .into();
+        cx.update(|_, cx| {
+            SettingsStore::update_global(cx, |store, cx| {
+                store.update_user_settings(cx, |settings| {
+                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
+                        syntax: IndexMap::from_iter([(
+                            "function".to_string(),
+                            HighlightStyleContent {
+                                color: Some("#0000ff".to_string()),
+                                background_color: None,
+                                font_style: None,
+                                font_weight: None,
+                            },
+                        )]),
+                        ..ThemeStyleContent::default()
+                    });
+                });
+            });
+        });
+
+        cx.executor().advance_clock(Duration::from_millis(200));
+        cx.run_until_parked();
+
+        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
+        assert_eq!(styles_after_second_override.len(), 1);
+        assert_eq!(
+            styles_after_second_override[0].color,
+            Some(blue_color),
+            "Highlight should have blue color from updated theme override"
+        );
+
+        // Removing overrides reverts to the original theme color.
+        cx.update(|_, cx| {
+            SettingsStore::update_global(cx, |store, cx| {
+                store.update_user_settings(cx, |settings| {
+                    settings.theme.experimental_theme_overrides = None;
+                });
+            });
+        });
+
+        cx.executor().advance_clock(Duration::from_millis(200));
+        cx.run_until_parked();
+
+        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
+        assert_eq!(styles_after_clear.len(), 1);
+        assert_eq!(
+            styles_after_clear[0].color, initial_color,
+            "Highlight should revert to initial color after clearing overrides"
+        );
+    }
+
+    #[gpui::test]
+    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
+        use collections::IndexMap;
+        use gpui::{Hsla, Rgba, UpdateGlobal as _};
+        use theme::{HighlightStyleContent, ThemeStyleContent};
+        use ui::ActiveTheme as _;
+
+        init_test(cx, |_| {});
+
+        update_test_language_settings(cx, |language_settings| {
+            language_settings.languages.0.insert(
+                "Rust".into(),
+                LanguageSettingsContent {
+                    semantic_tokens: Some(SemanticTokens::Full),
+                    ..LanguageSettingsContent::default()
+                },
+            );
+        });
+
+        let mut cx = EditorLspTestContext::new_rust(
+            lsp::ServerCapabilities {
+                semantic_tokens_provider: Some(
+                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+                        lsp::SemanticTokensOptions {
+                            legend: lsp::SemanticTokensLegend {
+                                token_types: Vec::from(["function".into()]),
+                                token_modifiers: Vec::new(),
+                            },
+                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+                            ..lsp::SemanticTokensOptions::default()
+                        },
+                    ),
+                ),
+                ..lsp::ServerCapabilities::default()
+            },
+            cx,
+        )
+        .await;
+
+        let mut full_request = cx
+            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+                move |_, _, _| async move {
+                    Ok(Some(lsp::SemanticTokensResult::Tokens(
+                        lsp::SemanticTokens {
+                            data: vec![
+                                0, // delta_line
+                                3, // delta_start
+                                4, // length
+                                0, // token_type (function)
+                                0, // token_modifiers_bitset
+                            ],
+                            result_id: None,
+                        },
+                    )))
+                },
+            );
+
+        cx.set_state("ˇfn main() {}");
+        full_request.next().await;
+        cx.run_until_parked();
+
+        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
+        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
+        let initial_color = initial_styles[0].color;
+
+        // Per-theme overrides (theme_overrides keyed by theme name) also go through
+        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
+        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
+        let green_color: Hsla = Rgba {
+            r: 0.0,
+            g: 1.0,
+            b: 0.0,
+            a: 1.0,
+        }
+        .into();
+        cx.update(|_, cx| {
+            SettingsStore::update_global(cx, |store, cx| {
+                store.update_user_settings(cx, |settings| {
+                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
+                        theme_name.clone(),
+                        ThemeStyleContent {
+                            syntax: IndexMap::from_iter([(
+                                "function".to_string(),
+                                HighlightStyleContent {
+                                    color: Some("#00ff00".to_string()),
+                                    background_color: None,
+                                    font_style: None,
+                                    font_weight: None,
+                                },
+                            )]),
+                            ..ThemeStyleContent::default()
+                        },
+                    )]);
+                });
+            });
+        });
+
+        cx.executor().advance_clock(Duration::from_millis(200));
+        cx.run_until_parked();
+
+        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
+        assert_eq!(styles_after_override.len(), 1);
+        assert_eq!(
+            styles_after_override[0].color,
+            Some(green_color),
+            "Highlight should have green color from per-theme override"
+        );
+        assert_ne!(
+            styles_after_override[0].color, initial_color,
+            "Color should have changed from initial"
+        );
+    }
+
     fn extract_semantic_highlight_styles(
         editor: &Entity<Editor>,
         cx: &TestAppContext,

crates/language_tools/src/highlights_tree_view.rs 🔗

@@ -302,7 +302,7 @@ impl HighlightsTreeView {
                         .buffer(*buffer_id)
                         .and_then(|buf| buf.read(cx).language().map(|l| l.name()));
                     for token in tokens.iter() {
-                        let range: Range<Anchor> = token.range.start.into()..token.range.end.into();
+                        let range = token.range.start..token.range.end;
                         let excerpt_id = range.start.excerpt_id;
                         let (range_display, sort_key) = format_anchor_range(
                             &range,
@@ -443,7 +443,7 @@ impl HighlightsTreeView {
 
     fn render_entry(&self, entry: &HighlightEntry, selected: bool, cx: &App) -> Div {
         let colors = cx.theme().colors();
-        let style_preview = render_style_preview(entry.style, cx);
+        let style_preview = render_style_preview(entry.style, selected, cx);
 
         h_flex()
             .gap_1()
@@ -979,7 +979,7 @@ fn format_anchor_range(
     }
 }
 
-fn render_style_preview(style: HighlightStyle, cx: &App) -> Div {
+fn render_style_preview(style: HighlightStyle, selected: bool, cx: &App) -> Div {
     let colors = cx.theme().colors();
 
     let display_color = style.color.or(style.background_color);
@@ -987,7 +987,11 @@ fn render_style_preview(style: HighlightStyle, cx: &App) -> Div {
     let mut preview = div().px_1().rounded_sm();
 
     if let Some(color) = display_color {
-        preview = preview.bg(color);
+        if selected {
+            preview = preview.border_1().border_color(color).text_color(color);
+        } else {
+            preview = preview.bg(color);
+        }
     } else {
         preview = preview.bg(colors.element_background);
     }
@@ -1016,7 +1020,10 @@ fn render_style_preview(style: HighlightStyle, cx: &App) -> Div {
         parts.join(" ")
     };
 
-    preview.child(Label::new(label_text).size(LabelSize::Small))
+    preview.child(Label::new(label_text).size(LabelSize::Small).when_some(
+        display_color.filter(|_| selected),
+        |label, display_color| label.color(Color::Custom(display_color)),
+    ))
 }
 
 fn format_hsla_as_hex(color: Hsla) -> String {

crates/multi_buffer/src/anchor.rs 🔗

@@ -269,230 +269,3 @@ impl AnchorRangeExt for Range<Anchor> {
         self.start.to_point(content)..self.end.to_point(content)
     }
 }
-
-/// An [`Anchor`] without a diff base anchor.
-///
-/// The main benefit of this type is that it almost half the size of a full anchor.
-/// Store this if you know you are never working with diff base anchors.
-#[derive(Clone, Copy, Eq, PartialEq, Hash)]
-pub struct DiffbaselessAnchor {
-    /// Identifies which excerpt within the multi-buffer this anchor belongs to.
-    /// A multi-buffer can contain multiple excerpts from different buffers.
-    pub excerpt_id: ExcerptId,
-    /// The position within the excerpt's underlying buffer. This is a stable
-    /// reference that remains valid as the buffer text is edited.
-    pub text_anchor: text::Anchor,
-}
-
-impl std::fmt::Debug for DiffbaselessAnchor {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        if self.is_min() {
-            return write!(f, "Anchor::min({:?})", self.text_anchor.buffer_id);
-        }
-        if self.is_max() {
-            return write!(f, "Anchor::max({:?})", self.text_anchor.buffer_id);
-        }
-
-        f.debug_struct("Anchor")
-            .field("excerpt_id", &self.excerpt_id)
-            .field("text_anchor", &self.text_anchor)
-            .finish()
-    }
-}
-
-impl DiffbaselessAnchor {
-    pub fn in_buffer(excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Self {
-        Self {
-            excerpt_id,
-            text_anchor,
-        }
-    }
-
-    pub fn range_in_buffer(excerpt_id: ExcerptId, range: Range<text::Anchor>) -> Range<Self> {
-        Self::in_buffer(excerpt_id, range.start)..Self::in_buffer(excerpt_id, range.end)
-    }
-
-    pub fn min() -> Self {
-        Self {
-            excerpt_id: ExcerptId::min(),
-            text_anchor: text::Anchor::MIN,
-        }
-    }
-
-    pub fn max() -> Self {
-        Self {
-            excerpt_id: ExcerptId::max(),
-            text_anchor: text::Anchor::MAX,
-        }
-    }
-
-    pub fn is_min(&self) -> bool {
-        self.excerpt_id == ExcerptId::min() && self.text_anchor.is_min()
-    }
-
-    pub fn is_max(&self) -> bool {
-        self.excerpt_id == ExcerptId::max() && self.text_anchor.is_max()
-    }
-
-    pub fn cmp(&self, other: &DiffbaselessAnchor, snapshot: &MultiBufferSnapshot) -> Ordering {
-        if self == other {
-            return Ordering::Equal;
-        }
-
-        let self_excerpt_id = snapshot.latest_excerpt_id(self.excerpt_id);
-        let other_excerpt_id = snapshot.latest_excerpt_id(other.excerpt_id);
-
-        let excerpt_id_cmp = self_excerpt_id.cmp(&other_excerpt_id, snapshot);
-        if excerpt_id_cmp.is_ne() {
-            return excerpt_id_cmp;
-        }
-        if self_excerpt_id == ExcerptId::max()
-            && self.text_anchor.is_max()
-            && self.text_anchor.is_max()
-        {
-            return Ordering::Equal;
-        }
-        if let Some(excerpt) = snapshot.excerpt(self_excerpt_id) {
-            let text_cmp = self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer);
-            if text_cmp.is_ne() {
-                return text_cmp;
-            }
-        }
-        Ordering::Equal
-    }
-
-    pub fn bias(&self) -> Bias {
-        self.text_anchor.bias
-    }
-
-    pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> DiffbaselessAnchor {
-        if self.text_anchor.bias != Bias::Left
-            && let Some(excerpt) = snapshot.excerpt(self.excerpt_id)
-        {
-            return Self {
-                excerpt_id: excerpt.id,
-                text_anchor: self.text_anchor.bias_left(&excerpt.buffer),
-            };
-        }
-        *self
-    }
-
-    pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> DiffbaselessAnchor {
-        if self.text_anchor.bias != Bias::Right
-            && let Some(excerpt) = snapshot.excerpt(self.excerpt_id)
-        {
-            return Self {
-                excerpt_id: excerpt.id,
-                text_anchor: self.text_anchor.bias_right(&excerpt.buffer),
-            };
-        }
-        *self
-    }
-
-    pub fn summary<D>(&self, snapshot: &MultiBufferSnapshot) -> D
-    where
-        D: MultiBufferDimension
-            + Ord
-            + Sub<Output = D::TextDimension>
-            + Sub<D::TextDimension, Output = D>
-            + AddAssign<D::TextDimension>
-            + Add<D::TextDimension, Output = D>,
-        D::TextDimension: Sub<Output = D::TextDimension> + Ord,
-    {
-        snapshot.summary_for_anchor(&Anchor {
-            excerpt_id: self.excerpt_id,
-            text_anchor: self.text_anchor,
-            diff_base_anchor: None,
-        })
-    }
-
-    pub fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool {
-        if self.is_min() || self.is_max() {
-            true
-        } else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
-            (self.text_anchor == excerpt.range.context.start
-                || self.text_anchor == excerpt.range.context.end
-                || self.text_anchor.is_valid(&excerpt.buffer))
-                && excerpt.contains_diffbaseless(self)
-        } else {
-            false
-        }
-    }
-}
-
-impl ToOffset for DiffbaselessAnchor {
-    fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffset {
-        self.summary(snapshot)
-    }
-    fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffsetUtf16 {
-        self.summary(snapshot)
-    }
-}
-
-impl ToPoint for DiffbaselessAnchor {
-    fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point {
-        self.summary(snapshot)
-    }
-    fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> rope::PointUtf16 {
-        self.summary(snapshot)
-    }
-}
-
-pub trait DiffbaselessAnchorRangeExt {
-    fn cmp(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> Ordering;
-    fn includes(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> bool;
-    fn overlaps(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> bool;
-    fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<MultiBufferOffset>;
-    fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point>;
-}
-
-impl DiffbaselessAnchorRangeExt for Range<DiffbaselessAnchor> {
-    fn cmp(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> Ordering {
-        match self.start.cmp(&other.start, buffer) {
-            Ordering::Equal => other.end.cmp(&self.end, buffer),
-            ord => ord,
-        }
-    }
-
-    fn includes(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> bool {
-        self.start.cmp(&other.start, buffer).is_le() && other.end.cmp(&self.end, buffer).is_le()
-    }
-
-    fn overlaps(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> bool {
-        self.end.cmp(&other.start, buffer).is_ge() && self.start.cmp(&other.end, buffer).is_le()
-    }
-
-    fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<MultiBufferOffset> {
-        self.start.to_offset(content)..self.end.to_offset(content)
-    }
-
-    fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point> {
-        self.start.to_point(content)..self.end.to_point(content)
-    }
-}
-
-pub struct AnchorHasDiffbaseError;
-
-impl TryFrom<Anchor> for DiffbaselessAnchor {
-    type Error = AnchorHasDiffbaseError;
-
-    fn try_from(anchor: Anchor) -> Result<Self, AnchorHasDiffbaseError> {
-        if anchor.diff_base_anchor.is_some() {
-            return Err(AnchorHasDiffbaseError);
-        }
-        Ok(DiffbaselessAnchor {
-            excerpt_id: anchor.excerpt_id,
-            text_anchor: anchor.text_anchor,
-        })
-    }
-}
-
-impl From<DiffbaselessAnchor> for Anchor {
-    fn from(diffbaseless: DiffbaselessAnchor) -> Self {
-        Anchor {
-            excerpt_id: diffbaseless.excerpt_id,
-            text_anchor: diffbaseless.text_anchor,
-            diff_base_anchor: None,
-        }
-    }
-}

crates/multi_buffer/src/multi_buffer.rs 🔗

@@ -6,9 +6,7 @@ mod transaction;
 
 use self::transaction::History;
 
-pub use anchor::{
-    Anchor, AnchorHasDiffbaseError, AnchorRangeExt, DiffbaselessAnchor, DiffbaselessAnchorRangeExt,
-};
+pub use anchor::{Anchor, AnchorRangeExt};
 
 use anyhow::{Result, anyhow};
 use buffer_diff::{
@@ -7312,23 +7310,6 @@ impl Excerpt {
                 .is_ge()
     }
 
-    fn contains_diffbaseless(&self, anchor: &DiffbaselessAnchor) -> bool {
-        (anchor.text_anchor.buffer_id == None
-            || anchor.text_anchor.buffer_id == Some(self.buffer_id))
-            && self
-                .range
-                .context
-                .start
-                .cmp(&anchor.text_anchor, &self.buffer)
-                .is_le()
-            && self
-                .range
-                .context
-                .end
-                .cmp(&anchor.text_anchor, &self.buffer)
-                .is_ge()
-    }
-
     /// The [`Excerpt`]'s start offset in its [`Buffer`]
     fn buffer_start_offset(&self) -> BufferOffset {
         BufferOffset(self.range.context.start.to_offset(&self.buffer))

crates/project/src/lsp_store.rs 🔗

@@ -31,7 +31,7 @@ use crate::{
     lsp_store::{
         self,
         log_store::{GlobalLogStore, LanguageServerKind},
-        semantic_tokens::SemanticTokensData,
+        semantic_tokens::{SemanticTokenConfig, SemanticTokensData},
     },
     manifest_tree::{
         LanguageServerTree, LanguageServerTreeNode, LaunchDisposition, ManifestQueryDelegate,
@@ -144,7 +144,7 @@ pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
 pub use semantic_tokens::{
     BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer, TokenType,
 };
-use settings::SemanticTokenRules;
+
 pub use worktree::{
     Entry, EntryKind, FS_WATCH_LATENCY, File, LocalWorktree, PathChange, ProjectEntryId,
     UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings,
@@ -3841,12 +3841,9 @@ pub struct LspStore {
     diagnostic_summaries:
         HashMap<WorktreeId, HashMap<Arc<RelPath>, HashMap<LanguageServerId, DiagnosticSummary>>>,
     pub lsp_server_capabilities: HashMap<LanguageServerId, lsp::ServerCapabilities>,
-    semantic_token_stylizers:
-        HashMap<(LanguageServerId, Option<LanguageName>), SemanticTokenStylizer>,
-    semantic_token_rules: SemanticTokenRules,
+    semantic_token_config: SemanticTokenConfig,
     lsp_data: HashMap<BufferId, BufferLspData>,
     next_hint_id: Arc<AtomicUsize>,
-    global_semantic_tokens_mode: settings::SemanticTokens,
 }
 
 #[derive(Debug)]
@@ -4131,8 +4128,6 @@ impl LspStore {
             (Self::maintain_workspace_config(receiver, cx), sender)
         };
 
-        let global_semantic_tokens_mode = all_language_settings(None, cx).defaults.semantic_tokens;
-
         Self {
             mode: LspStoreMode::Local(LocalLspStore {
                 weak: cx.weak_entity(),
@@ -4186,16 +4181,11 @@ impl LspStore {
             nonce: StdRng::from_os_rng().random(),
             diagnostic_summaries: HashMap::default(),
             lsp_server_capabilities: HashMap::default(),
-            semantic_token_stylizers: HashMap::default(),
-            semantic_token_rules: crate::project_settings::ProjectSettings::get_global(cx)
-                .global_lsp_settings
-                .semantic_token_rules
-                .clone(),
+            semantic_token_config: SemanticTokenConfig::new(cx),
             lsp_data: HashMap::default(),
             next_hint_id: Arc::default(),
             active_entry: None,
             _maintain_workspace_config,
-            global_semantic_tokens_mode,
             _maintain_buffer_languages: Self::maintain_buffer_languages(languages, cx),
         }
     }
@@ -4238,7 +4228,6 @@ impl LspStore {
             let (sender, receiver) = watch::channel();
             (Self::maintain_workspace_config(receiver, cx), sender)
         };
-        let global_semantic_tokens_mode = all_language_settings(None, cx).defaults.semantic_tokens;
         Self {
             mode: LspStoreMode::Remote(RemoteLspStore {
                 upstream_client: Some(upstream_client),
@@ -4248,17 +4237,12 @@ impl LspStore {
             last_formatting_failure: None,
             buffer_store,
             worktree_store,
-            global_semantic_tokens_mode,
             languages: languages.clone(),
             language_server_statuses: Default::default(),
             nonce: StdRng::from_os_rng().random(),
             diagnostic_summaries: HashMap::default(),
             lsp_server_capabilities: HashMap::default(),
-            semantic_token_stylizers: HashMap::default(),
-            semantic_token_rules: crate::project_settings::ProjectSettings::get_global(cx)
-                .global_lsp_settings
-                .semantic_token_rules
-                .clone(),
+            semantic_token_config: SemanticTokenConfig::new(cx),
             next_hint_id: Arc::default(),
             lsp_data: HashMap::default(),
             active_entry: None,
@@ -5064,15 +5048,15 @@ impl LspStore {
             .global_lsp_settings
             .semantic_token_rules
             .clone();
-        if new_semantic_token_rules != self.semantic_token_rules {
-            self.semantic_token_rules = new_semantic_token_rules;
-            self.semantic_token_stylizers.clear();
-        }
+        self.semantic_token_config
+            .update_rules(new_semantic_token_rules);
 
         let new_global_semantic_tokens_mode =
             all_language_settings(None, cx).defaults.semantic_tokens;
-        if new_global_semantic_tokens_mode != self.global_semantic_tokens_mode {
-            self.global_semantic_tokens_mode = new_global_semantic_tokens_mode;
+        if self
+            .semantic_token_config
+            .update_global_mode(new_global_semantic_tokens_mode)
+        {
             self.restart_all_language_servers(cx);
         }
 
@@ -12460,8 +12444,7 @@ impl LspStore {
 
     fn cleanup_lsp_data(&mut self, for_server: LanguageServerId) {
         self.lsp_server_capabilities.remove(&for_server);
-        self.semantic_token_stylizers
-            .retain(|&(id, _), _| id != for_server);
+        self.semantic_token_config.remove_server_data(for_server);
         for lsp_data in self.lsp_data.values_mut() {
             lsp_data.remove_server_data(for_server);
         }

crates/project/src/lsp_store/semantic_tokens.rs 🔗

@@ -10,7 +10,7 @@ use futures::{
 };
 use gpui::{App, AppContext, AsyncApp, Context, Entity, ReadGlobal as _, SharedString, Task};
 use itertools::Itertools;
-use language::{Buffer, LanguageName};
+use language::{Buffer, LanguageName, language_settings::all_language_settings};
 use lsp::{AdapterServerCapabilities, LSP_REQUEST_TIMEOUT, LanguageServerId};
 use rpc::{TypedEnvelope, proto};
 use settings::{SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore};
@@ -27,6 +27,48 @@ use crate::{
     project_settings::ProjectSettings,
 };
 
+pub(super) struct SemanticTokenConfig {
+    stylizers: HashMap<(LanguageServerId, Option<LanguageName>), SemanticTokenStylizer>,
+    rules: SemanticTokenRules,
+    global_mode: settings::SemanticTokens,
+}
+
+impl SemanticTokenConfig {
+    pub(super) fn new(cx: &App) -> Self {
+        Self {
+            stylizers: HashMap::default(),
+            rules: ProjectSettings::get_global(cx)
+                .global_lsp_settings
+                .semantic_token_rules
+                .clone(),
+            global_mode: all_language_settings(None, cx).defaults.semantic_tokens,
+        }
+    }
+
+    pub(super) fn remove_server_data(&mut self, server_id: LanguageServerId) {
+        self.stylizers.retain(|&(id, _), _| id != server_id);
+    }
+
+    pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
+        if new_rules != self.rules {
+            self.rules = new_rules;
+            self.stylizers.clear();
+            true
+        } else {
+            false
+        }
+    }
+
+    pub(super) fn update_global_mode(&mut self, new_mode: settings::SemanticTokens) -> bool {
+        if new_mode != self.global_mode {
+            self.global_mode = new_mode;
+            true
+        } else {
+            false
+        }
+    }
+}
+
 #[derive(Debug, Clone, Copy)]
 pub struct RefreshForServer {
     pub server_id: LanguageServerId,
@@ -326,7 +368,8 @@ impl LspStore {
         cx: &mut App,
     ) -> Option<&SemanticTokenStylizer> {
         let stylizer = match self
-            .semantic_token_stylizers
+            .semantic_token_config
+            .stylizers
             .entry((server_id, language.cloned()))
         {
             hash_map::Entry::Occupied(o) => o.into_mut(),

crates/project/src/project.rs 🔗

@@ -5726,6 +5726,32 @@ impl Project {
         })
     }
 
+    pub fn any_language_server_supports_semantic_tokens(
+        &self,
+        buffer: &Buffer,
+        cx: &mut App,
+    ) -> bool {
+        let Some(language) = buffer.language().cloned() else {
+            return false;
+        };
+        let lsp_store = self.lsp_store.read(cx);
+        let relevant_language_servers = lsp_store
+            .languages
+            .lsp_adapters(&language.name())
+            .into_iter()
+            .map(|lsp_adapter| lsp_adapter.name())
+            .collect::<HashSet<_>>();
+        lsp_store
+            .language_server_statuses()
+            .filter_map(|(server_id, server_status)| {
+                relevant_language_servers
+                    .contains(&server_status.name)
+                    .then_some(server_id)
+            })
+            .filter_map(|server_id| lsp_store.lsp_server_capabilities.get(&server_id))
+            .any(|capabilities| capabilities.semantic_tokens_provider.is_some())
+    }
+
     pub fn language_server_id_for_name(
         &self,
         buffer: &Buffer,

crates/zed/src/zed/quick_action_bar.rs 🔗

@@ -110,6 +110,8 @@ impl Render for QuickActionBar {
         };
 
         let supports_inlay_hints = editor.update(cx, |editor, cx| editor.supports_inlay_hints(cx));
+        let supports_semantic_tokens =
+            editor.update(cx, |editor, cx| editor.supports_semantic_tokens(cx));
         let editor_value = editor.read(cx);
         let selection_menu_enabled = editor_value.selection_menu_enabled(cx);
         let inlay_hints_enabled = editor_value.inlay_hints_enabled();
@@ -379,7 +381,7 @@ impl Render for QuickActionBar {
                                 );
                             }
 
-                            if is_full {
+                            if supports_semantic_tokens {
                                 menu = menu.toggleable_entry(
                                     "Semantic Highlights",
                                     semantic_highlights_enabled,
@@ -723,26 +725,36 @@ impl ToolbarItemView for QuickActionBar {
             self._inlay_hints_enabled_subscription.take();
 
             if let Some(editor) = active_item.downcast::<Editor>() {
-                let (mut inlay_hints_enabled, mut supports_inlay_hints) =
-                    editor.update(cx, |editor, cx| {
-                        (
-                            editor.inlay_hints_enabled(),
-                            editor.supports_inlay_hints(cx),
-                        )
-                    });
+                let (
+                    mut inlay_hints_enabled,
+                    mut supports_inlay_hints,
+                    mut supports_semantic_tokens,
+                ) = editor.update(cx, |editor, cx| {
+                    (
+                        editor.inlay_hints_enabled(),
+                        editor.supports_inlay_hints(cx),
+                        editor.supports_semantic_tokens(cx),
+                    )
+                });
                 self._inlay_hints_enabled_subscription =
                     Some(cx.observe(&editor, move |_, editor, cx| {
-                        let (new_inlay_hints_enabled, new_supports_inlay_hints) =
-                            editor.update(cx, |editor, cx| {
-                                (
-                                    editor.inlay_hints_enabled(),
-                                    editor.supports_inlay_hints(cx),
-                                )
-                            });
+                        let (
+                            new_inlay_hints_enabled,
+                            new_supports_inlay_hints,
+                            new_supports_semantic_tokens,
+                        ) = editor.update(cx, |editor, cx| {
+                            (
+                                editor.inlay_hints_enabled(),
+                                editor.supports_inlay_hints(cx),
+                                editor.supports_semantic_tokens(cx),
+                            )
+                        });
                         let should_notify = inlay_hints_enabled != new_inlay_hints_enabled
-                            || supports_inlay_hints != new_supports_inlay_hints;
+                            || supports_inlay_hints != new_supports_inlay_hints
+                            || supports_semantic_tokens != new_supports_semantic_tokens;
                         inlay_hints_enabled = new_inlay_hints_enabled;
                         supports_inlay_hints = new_supports_inlay_hints;
+                        supports_semantic_tokens = new_supports_semantic_tokens;
                         if should_notify {
                             cx.notify()
                         }