Detailed changes
@@ -9200,6 +9200,7 @@ dependencies = [
"itertools 0.14.0",
"language",
"lsp",
+ "menu",
"project",
"proto",
"release_channel",
@@ -9734,7 +9735,7 @@ dependencies = [
[[package]]
name = "lsp-types"
version = "0.95.1"
-source = "git+https://github.com/zed-industries/lsp-types?rev=b71ab4eeb27d9758be8092020a46fe33fbca4e33#b71ab4eeb27d9758be8092020a46fe33fbca4e33"
+source = "git+https://github.com/zed-industries/lsp-types?rev=fb6bcad59522455a041b7eb9579f706e5cfb2d6f#fb6bcad59522455a041b7eb9579f706e5cfb2d6f"
dependencies = [
"bitflags 1.3.2",
"serde",
@@ -15138,6 +15139,7 @@ dependencies = [
"anyhow",
"collections",
"derive_more 0.99.20",
+ "gpui",
"log",
"schemars",
"serde",
@@ -557,7 +557,7 @@ libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
-lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "b71ab4eeb27d9758be8092020a46fe33fbca4e33" }
+lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "fb6bcad59522455a041b7eb9579f706e5cfb2d6f" }
mach2 = "0.5"
markup5ever_rcdom = "0.3.0"
metal = "0.29"
@@ -1090,6 +1090,15 @@
//
// This is typically customized on a per-language basis.
"language_servers": ["..."],
+ // Controls how semantic tokens from language servers are used for syntax highlighting.
+ //
+ // Options:
+ // - "off": Do not request semantic tokens from language servers.
+ // - "combined": Use LSP semantic tokens together with tree-sitter highlighting as base.
+ // - "full": Use LSP semantic tokens exclusively to highlight the text, tree-sitter syntax highlighting is off.
+ //
+ // May require language server restart to properly apply.
+ "semantic_tokens": "off",
// When to automatically save edited buffers. This setting can
// take four values.
@@ -2216,6 +2225,36 @@
// Set to 0 to disable auto-dismiss.
"dismiss_timeout_ms": 5000,
},
+ // Rules for highlighting semantic tokens. User-defined rules are prepended to the default
+  // rules (viewable via the "zed: show default semantic token rules" action), so they take precedence.
+ //
+ // Each `rule` has the following properties:
+ // - `token_type`: The LSP semantic token type to customize. If omitted, the rule matches all token types.
+ // - `token_modifiers`: A list of LSP semantic token modifiers to match. All modifiers must be present
+ // to match.
+ // - `style`: A list of styles from the current syntax theme to use. The first style found is used. Any
+ // settings below override that style.
+ // - `foreground_color`: The foreground color to use for the token type, in hex format (e.g., "#ff0000").
+ // - `background_color`: The background color to use for the token type, in hex format.
+ // - `underline`: A boolean or color to underline with, in hex format. If `true`, then the token will be underlined with the text color.
+  //   - `strikethrough`: A boolean or color to strikethrough with, in hex format. If `true`, then the token will have a strikethrough with the text color.
+ // - `font_weight`: One of "normal", "bold".
+ // - `font_style`: One of "normal", "italic".
+ //
+ // The first matching rule for a token is applied. Because user-defined rules are prepended to the
+ // default rules, a token can be entirely disabled by adding an empty rule that matches it.
+ //
+ // Example: Highlight unresolved references in red and bold:
+ // "semantic_token_rules": [
+ // {
+ // "token_type": "unresolvedReference",
+ // "foreground_color": "#c93f3f",
+ // "font_weight": "bold"
+ // }
+ // ]
+ //
+ // Default rules are viewable via the "zed: show default semantic token rules" action.
+ "semantic_token_rules": [],
},
// Jupyter settings
"jupyter": {
@@ -0,0 +1,239 @@
+// Default semantic token rules for Zed (read-only).
+//
+// These rules map LSP semantic token types to syntax theme styles.
+// To customize, add rules to "semantic_token_rules" in your settings.json.
+// User-defined rules are prepended to these defaults and take precedence.
+//
+// Each rule has the following properties:
+// - `token_type`: The LSP semantic token type to match. If omitted, matches all types.
+// - `token_modifiers`: A list of LSP semantic token modifiers to match. All must be present.
+// - `style`: A list of syntax theme styles to try. The first one found is used.
+// - `foreground_color`: Override foreground color in hex format (e.g., "#ff0000").
+// - `background_color`: Override background color in hex format.
+// - `underline`: Boolean or color to underline with. If `true`, uses text color.
+// - `strikethrough`: Boolean or color. If `true`, uses text color.
+// - `font_weight`: One of "normal", "bold".
+// - `font_style`: One of "normal", "italic".
+//
+// See the VSCode docs [1] and the LSP Spec [2] for reasoning behind these defaults.
+//
+// [1]: https://code.visualstudio.com/api/language-extensions/semantic-highlight-guide#standard-token-types-and-modifiers
+// [2]: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#semanticTokenTypes
+[
+ // Types
+ {
+ "token_type": "namespace",
+ "token_modifiers": [],
+ "style": ["namespace", "module", "type"],
+ },
+ {
+ "token_type": "class",
+ "token_modifiers": ["declaration"],
+ "style": ["type.class.definition", "type.definition"],
+ },
+ {
+ "token_type": "class",
+ "token_modifiers": ["definition"],
+ "style": ["type.class.definition", "type.definition"],
+ },
+ {
+ "token_type": "class",
+ "token_modifiers": [],
+ "style": ["type.class", "class", "type"],
+ },
+ {
+ "token_type": "enum",
+ "token_modifiers": ["declaration"],
+ "style": ["type.enum.definition", "type.definition"],
+ },
+ {
+ "token_type": "enum",
+ "token_modifiers": ["definition"],
+ "style": ["type.enum.definition", "type.definition"],
+ },
+ {
+ "token_type": "enum",
+ "token_modifiers": [],
+ "style": ["type.enum", "enum", "type"],
+ },
+ {
+ "token_type": "interface",
+ "token_modifiers": ["declaration"],
+ "style": ["type.interface.definition", "type.definition"],
+ },
+ {
+ "token_type": "interface",
+ "token_modifiers": ["definition"],
+ "style": ["type.interface.definition", "type.definition"],
+ },
+ {
+ "token_type": "interface",
+ "token_modifiers": [],
+ "style": ["type.interface", "interface", "type"],
+ },
+ {
+ "token_type": "struct",
+ "token_modifiers": ["declaration"],
+ "style": ["type.struct.definition", "type.definition"],
+ },
+ {
+ "token_type": "struct",
+ "token_modifiers": ["definition"],
+ "style": ["type.struct.definition", "type.definition"],
+ },
+ {
+ "token_type": "struct",
+ "token_modifiers": [],
+ "style": ["type.struct", "struct", "type"],
+ },
+ {
+ "token_type": "typeParameter",
+ "token_modifiers": ["declaration"],
+ "style": ["type.parameter.definition", "type.definition"],
+ },
+ {
+ "token_type": "typeParameter",
+ "token_modifiers": ["definition"],
+ "style": ["type.parameter.definition", "type.definition"],
+ },
+ {
+ "token_type": "typeParameter",
+ "token_modifiers": [],
+ "style": ["type.parameter", "type"],
+ },
+ {
+ "token_type": "type",
+ "token_modifiers": ["declaration"],
+ "style": ["type.definition"],
+ },
+ {
+ "token_type": "type",
+ "token_modifiers": ["definition"],
+ "style": ["type.definition"],
+ },
+ {
+ "token_type": "type",
+ "token_modifiers": [],
+ "style": ["type"],
+ },
+ // References
+ {
+ "token_type": "parameter",
+ "token_modifiers": [],
+ "style": ["variable"],
+ },
+ {
+ "token_type": "variable",
+ "token_modifiers": ["defaultLibrary", "constant"],
+ "style": ["constant.builtin"],
+ },
+ {
+ "token_type": "variable",
+ "token_modifiers": ["defaultLibrary"],
+ "style": ["variable.builtin"],
+ },
+ {
+ "token_type": "variable",
+ "token_modifiers": ["constant"],
+ "style": ["constant"],
+ },
+ {
+ "token_type": "variable",
+ "token_modifiers": [],
+ "style": ["variable"],
+ },
+ {
+ "token_type": "property",
+ "token_modifiers": [],
+ "style": ["property"],
+ },
+ {
+ "token_type": "enumMember",
+ "token_modifiers": [],
+ "style": ["type.enum.member", "type.enum", "variant"],
+ },
+ {
+ "token_type": "decorator",
+ "token_modifiers": [],
+ "style": ["function.decorator", "function.annotation", "attribute"],
+ },
+ // Declarations in the docs, but in practice, also references
+ {
+ "token_type": "function",
+ "token_modifiers": ["defaultLibrary"],
+ "style": ["function.builtin"],
+ },
+ {
+ "token_type": "function",
+ "token_modifiers": [],
+ "style": ["function"],
+ },
+ {
+ "token_type": "method",
+ "token_modifiers": ["defaultLibrary"],
+ "style": ["function.builtin"],
+ },
+ {
+ "token_type": "method",
+ "token_modifiers": [],
+ "style": ["function.method", "function"],
+ },
+ {
+ "token_type": "macro",
+ "token_modifiers": [],
+ "style": ["function.macro", "function"],
+ },
+ {
+ "token_type": "label",
+ "token_modifiers": [],
+ "style": ["label"],
+ },
+ // Tokens
+ {
+ "token_type": "comment",
+ "token_modifiers": ["documentation"],
+ "style": ["comment.documentation", "comment.doc"],
+ },
+ {
+ "token_type": "comment",
+ "token_modifiers": [],
+ "style": ["comment"],
+ },
+ {
+ "token_type": "string",
+ "token_modifiers": [],
+ "style": ["string"],
+ },
+ {
+ "token_type": "keyword",
+ "token_modifiers": [],
+ "style": ["keyword"],
+ },
+ {
+ "token_type": "number",
+ "token_modifiers": [],
+ "style": ["number"],
+ },
+ {
+ "token_type": "regexp",
+ "token_modifiers": [],
+ "style": ["string.regexp", "string"],
+ },
+ {
+ "token_type": "operator",
+ "token_modifiers": [],
+ "style": ["operator"],
+ },
+ // Not in the VS Code docs, but in the LSP spec.
+ {
+ "token_type": "modifier",
+ "token_modifiers": [],
+ "style": ["keyword.modifier"],
+ },
+ // C#
+ {
+ "token_type": "event",
+ "token_modifiers": [],
+ "style": ["type.event", "type"],
+ },
+]
@@ -235,7 +235,7 @@
"font_weight": 700
},
"enum": {
- "color": "#d07277ff",
+ "color": "#6eb4bfff",
"font_style": null,
"font_weight": null
},
@@ -637,7 +637,7 @@
"font_weight": 700
},
"enum": {
- "color": "#d3604fff",
+ "color": "#3882b7ff",
"font_style": null,
"font_weight": null
},
@@ -27,7 +27,7 @@ use editor::SelectionEffects;
use editor::scroll::ScrollOffset;
use editor::{
Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, ExcerptRange,
- MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
+ HighlightKey, MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
actions::SelectAll,
display_map::{
BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, EditorMargins,
@@ -1432,9 +1432,10 @@ impl InlineAssistant {
}
if foreground_ranges.is_empty() {
- editor.clear_highlights::<InlineAssist>(cx);
+ editor.clear_highlights(HighlightKey::InlineAssist, cx);
} else {
- editor.highlight_text::<InlineAssist>(
+ editor.highlight_text(
+ HighlightKey::InlineAssist,
foreground_ranges,
HighlightStyle {
fade_out: Some(0.6),
@@ -392,6 +392,9 @@ impl Server {
.add_message_handler(create_image_for_peer)
.add_request_handler(update_buffer)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshInlayHints>)
+ .add_message_handler(
+ broadcast_project_message_from_host::<proto::RefreshSemanticTokens>,
+ )
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshCodeLens>)
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateBufferFile>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferReloaded>)
@@ -24,6 +24,7 @@ use gpui::{
use indoc::indoc;
use language::{FakeLspAdapter, language_settings::language_settings, rust_lang};
use lsp::LSP_REQUEST_TIMEOUT;
+use multi_buffer::DiffbaselessAnchorRangeExt as _;
use pretty_assertions::assert_eq;
use project::{
ProgressToken, ProjectPath, SERVER_PROGRESS_THROTTLE_TIMEOUT,
@@ -33,7 +34,7 @@ use project::{
use recent_projects::disconnected_overlay::DisconnectedOverlay;
use rpc::RECEIVE_TIMEOUT;
use serde_json::json;
-use settings::{InlayHintSettingsContent, InlineBlameSettings, SettingsStore};
+use settings::{InlayHintSettingsContent, InlineBlameSettings, SemanticTokens, SettingsStore};
use std::{
collections::BTreeSet,
num::NonZeroU32,
@@ -4791,6 +4792,420 @@ fn extract_color_inlays(editor: &Editor, cx: &App) -> Vec<Rgba> {
.collect()
}
+fn extract_semantic_token_ranges(editor: &Editor, cx: &App) -> Vec<Range<MultiBufferOffset>> {
+ let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
+ editor
+ .display_map
+ .read(cx)
+ .semantic_token_highlights
+ .iter()
+ .flat_map(|(_, (v, _))| v.iter())
+ .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
+ .collect()
+}
+
+#[gpui::test(iterations = 10)]
+async fn test_mutual_editor_semantic_token_cache_update(
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+) {
+ let mut server = TestServer::start(cx_a.executor()).await;
+ let executor = cx_a.executor();
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ server
+ .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+ .await;
+ let active_call_a = cx_a.read(ActiveCall::global);
+ let active_call_b = cx_b.read(ActiveCall::global);
+
+ cx_a.update(editor::init);
+ cx_b.update(editor::init);
+
+ cx_a.update(|cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.project.all_languages.defaults.semantic_tokens =
+ Some(SemanticTokens::Full);
+ });
+ });
+ });
+ cx_b.update(|cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.project.all_languages.defaults.semantic_tokens =
+ Some(SemanticTokens::Full);
+ });
+ });
+ });
+
+ let capabilities = lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: lsp::SemanticTokensLegend {
+ token_types: vec!["function".into()],
+ token_modifiers: vec![],
+ },
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..Default::default()
+ },
+ ),
+ ),
+ ..lsp::ServerCapabilities::default()
+ };
+ client_a.language_registry().add(rust_lang());
+
+ let edits_made = Arc::new(AtomicUsize::new(0));
+ let closure_edits_made = Arc::clone(&edits_made);
+ let mut fake_language_servers = client_a.language_registry().register_fake_lsp(
+ "Rust",
+ FakeLspAdapter {
+ capabilities: capabilities.clone(),
+ initializer: Some(Box::new(move |fake_language_server| {
+ let closure_edits_made = closure_edits_made.clone();
+ fake_language_server
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _| {
+ let edits_made_2 = Arc::clone(&closure_edits_made);
+ async move {
+ let edits_made =
+ AtomicUsize::load(&edits_made_2, atomic::Ordering::Acquire);
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ data: vec![
+ 0, // delta_line
+ 3, // delta_start
+ edits_made as u32 + 4, // length
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ ],
+ result_id: None,
+ },
+ )))
+ }
+ },
+ );
+ })),
+ ..FakeLspAdapter::default()
+ },
+ );
+ client_b.language_registry().add(rust_lang());
+ client_b.language_registry().register_fake_lsp_adapter(
+ "Rust",
+ FakeLspAdapter {
+ capabilities,
+ ..FakeLspAdapter::default()
+ },
+ );
+
+ client_a
+ .fs()
+ .insert_tree(
+ path!("/a"),
+ json!({
+ "main.rs": "fn main() { a }",
+ "other.rs": "// Test file",
+ }),
+ )
+ .await;
+ let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
+ active_call_a
+ .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
+ .await
+ .unwrap();
+ let project_id = active_call_a
+ .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+ .await
+ .unwrap();
+
+ let project_b = client_b.join_remote_project(project_id, cx_b).await;
+ active_call_b
+ .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+ .await
+ .unwrap();
+
+ let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
+
+ let file_a = workspace_a.update_in(cx_a, |workspace, window, cx| {
+ workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx)
+ });
+ let _fake_language_server = fake_language_servers.next().await.unwrap();
+ let editor_a = file_a.await.unwrap().downcast::<Editor>().unwrap();
+ executor.advance_clock(Duration::from_millis(100));
+ executor.run_until_parked();
+
+ let initial_edit = edits_made.load(atomic::Ordering::Acquire);
+ editor_a.update(cx_a, |editor, cx| {
+ let ranges = extract_semantic_token_ranges(editor, cx);
+ assert_eq!(
+ ranges,
+ vec![MultiBufferOffset(3)..MultiBufferOffset(3 + initial_edit + 4)],
+ "Host should get its first semantic tokens when opening an editor"
+ );
+ });
+
+ let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
+ let editor_b = workspace_b
+ .update_in(cx_b, |workspace, window, cx| {
+ workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx)
+ })
+ .await
+ .unwrap()
+ .downcast::<Editor>()
+ .unwrap();
+
+ executor.advance_clock(Duration::from_millis(100));
+ executor.run_until_parked();
+ editor_b.update(cx_b, |editor, cx| {
+ let ranges = extract_semantic_token_ranges(editor, cx);
+ assert_eq!(
+ ranges,
+ vec![MultiBufferOffset(3)..MultiBufferOffset(3 + initial_edit + 4)],
+ "Client should get its first semantic tokens when opening an editor"
+ );
+ });
+
+ let after_client_edit = edits_made.fetch_add(1, atomic::Ordering::Release) + 1;
+ editor_b.update_in(cx_b, |editor, window, cx| {
+ editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_ranges([MultiBufferOffset(13)..MultiBufferOffset(13)].clone())
+ });
+ editor.handle_input(":", window, cx);
+ });
+ cx_b.focus(&editor_b);
+
+ executor.advance_clock(Duration::from_secs(1));
+ executor.run_until_parked();
+ editor_a.update(cx_a, |editor, cx| {
+ let ranges = extract_semantic_token_ranges(editor, cx);
+ assert_eq!(
+ ranges,
+ vec![MultiBufferOffset(3)..MultiBufferOffset(3 + after_client_edit + 4)],
+ );
+ });
+ editor_b.update(cx_b, |editor, cx| {
+ let ranges = extract_semantic_token_ranges(editor, cx);
+ assert_eq!(
+ ranges,
+ vec![MultiBufferOffset(3)..MultiBufferOffset(3 + after_client_edit + 4)],
+ );
+ });
+
+ let after_host_edit = edits_made.fetch_add(1, atomic::Ordering::Release) + 1;
+ editor_a.update_in(cx_a, |editor, window, cx| {
+ editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
+ s.select_ranges([MultiBufferOffset(14)..MultiBufferOffset(14)])
+ });
+ editor.handle_input("a change", window, cx);
+ });
+ cx_a.focus(&editor_a);
+
+ executor.advance_clock(Duration::from_secs(1));
+ executor.run_until_parked();
+ editor_a.update(cx_a, |editor, cx| {
+ let ranges = extract_semantic_token_ranges(editor, cx);
+ assert_eq!(
+ ranges,
+ vec![MultiBufferOffset(3)..MultiBufferOffset(3 + after_host_edit + 4)],
+ );
+ });
+ editor_b.update(cx_b, |editor, cx| {
+ let ranges = extract_semantic_token_ranges(editor, cx);
+ assert_eq!(
+ ranges,
+ vec![MultiBufferOffset(3)..MultiBufferOffset(3 + after_host_edit + 4)],
+ );
+ });
+}
+
+#[gpui::test(iterations = 10)]
+async fn test_semantic_token_refresh_is_forwarded(
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+) {
+ let mut server = TestServer::start(cx_a.executor()).await;
+ let executor = cx_a.executor();
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ server
+ .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+ .await;
+ let active_call_a = cx_a.read(ActiveCall::global);
+ let active_call_b = cx_b.read(ActiveCall::global);
+
+ cx_a.update(editor::init);
+ cx_b.update(editor::init);
+
+ cx_a.update(|cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.project.all_languages.defaults.semantic_tokens = Some(SemanticTokens::Off);
+ });
+ });
+ });
+ cx_b.update(|cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.project.all_languages.defaults.semantic_tokens =
+ Some(SemanticTokens::Full);
+ });
+ });
+ });
+
+ let capabilities = lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: lsp::SemanticTokensLegend {
+ token_types: vec!["function".into()],
+ token_modifiers: vec![],
+ },
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..Default::default()
+ },
+ ),
+ ),
+ ..lsp::ServerCapabilities::default()
+ };
+ client_a.language_registry().add(rust_lang());
+ let mut fake_language_servers = client_a.language_registry().register_fake_lsp(
+ "Rust",
+ FakeLspAdapter {
+ capabilities: capabilities.clone(),
+ ..FakeLspAdapter::default()
+ },
+ );
+ client_b.language_registry().add(rust_lang());
+ client_b.language_registry().register_fake_lsp_adapter(
+ "Rust",
+ FakeLspAdapter {
+ capabilities,
+ ..FakeLspAdapter::default()
+ },
+ );
+
+ client_a
+ .fs()
+ .insert_tree(
+ path!("/a"),
+ json!({
+ "main.rs": "fn main() { a }",
+ "other.rs": "// Test file",
+ }),
+ )
+ .await;
+ let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
+ active_call_a
+ .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
+ .await
+ .unwrap();
+ let project_id = active_call_a
+ .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+ .await
+ .unwrap();
+
+ let project_b = client_b.join_remote_project(project_id, cx_b).await;
+ active_call_b
+ .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+ .await
+ .unwrap();
+
+ let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
+ let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
+
+ let editor_a = workspace_a
+ .update_in(cx_a, |workspace, window, cx| {
+ workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx)
+ })
+ .await
+ .unwrap()
+ .downcast::<Editor>()
+ .unwrap();
+
+ let editor_b = workspace_b
+ .update_in(cx_b, |workspace, window, cx| {
+ workspace.open_path((worktree_id, rel_path("main.rs")), None, true, window, cx)
+ })
+ .await
+ .unwrap()
+ .downcast::<Editor>()
+ .unwrap();
+
+ let other_tokens = Arc::new(AtomicBool::new(false));
+ let fake_language_server = fake_language_servers.next().await.unwrap();
+ let closure_other_tokens = Arc::clone(&other_tokens);
+ fake_language_server
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(move |params, _| {
+ let task_other_tokens = Arc::clone(&closure_other_tokens);
+ async move {
+ assert_eq!(
+ params.text_document.uri,
+ lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
+ );
+ let other_tokens = task_other_tokens.load(atomic::Ordering::Acquire);
+ let (delta_start, length) = if other_tokens { (0, 2) } else { (3, 4) };
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ data: vec![
+ 0, // delta_line
+ delta_start,
+ length,
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ ],
+ result_id: None,
+ },
+ )))
+ }
+ })
+ .next()
+ .await
+ .unwrap();
+
+ executor.run_until_parked();
+ editor_a.update(cx_a, |editor, cx| {
+ assert!(
+ extract_semantic_token_ranges(editor, cx).is_empty(),
+ "Host should get no semantic tokens due to them turned off"
+ );
+ });
+
+ executor.run_until_parked();
+ editor_b.update(cx_b, |editor, cx| {
+ assert_eq!(
+ vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
+ extract_semantic_token_ranges(editor, cx),
+ "Client should get its first semantic tokens when opening an editor"
+ );
+ });
+
+ other_tokens.fetch_or(true, atomic::Ordering::Release);
+ fake_language_server
+ .request::<lsp::request::SemanticTokensRefresh>(())
+ .await
+ .into_response()
+ .expect("semantic tokens refresh request failed");
+ // wait out the debounce timeout
+ executor.advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT);
+ executor.run_until_parked();
+ editor_a.update(cx_a, |editor, cx| {
+ assert!(
+ extract_semantic_token_ranges(editor, cx).is_empty(),
+ "Host should get no semantic tokens due to them turned off, even after the /refresh"
+ );
+ });
+
+ executor.run_until_parked();
+ editor_b.update(cx_b, |editor, cx| {
+ assert_eq!(
+ vec![MultiBufferOffset(0)..MultiBufferOffset(2)],
+ extract_semantic_token_ranges(editor, cx),
+ "Guest should get a /refresh LSP request propagated by host despite host tokens are off"
+ );
+ });
+}
+
#[gpui::test]
async fn test_remote_project_worktree_trust(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let has_restricted_worktrees = |project: &gpui::Entity<project::Project>,
@@ -632,7 +632,7 @@ impl Copilot {
let server = cx
.update(|cx| {
- let mut params = server.default_initialize_params(false, cx);
+ let mut params = server.default_initialize_params(false, false, cx);
params.initialization_options = Some(editor_info_json);
params
.capabilities
@@ -8,7 +8,7 @@ use collections::HashMap;
use dap::{CompletionItem, CompletionItemType, OutputEvent};
use editor::{
Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, ExcerptId,
- MultiBufferOffset, SizingBehavior,
+ HighlightKey, MultiBufferOffset, SizingBehavior,
};
use fuzzy::StringMatchCandidate;
use gpui::{
@@ -222,8 +222,6 @@ impl Console {
console.insert(&output, window, cx);
console.set_read_only(true);
- struct ConsoleAnsiHighlight;
-
let buffer = console.buffer().read(cx).snapshot(cx);
for (range, color) in spans {
@@ -238,8 +236,8 @@ impl Console {
)),
..Default::default()
};
- console.highlight_text_key::<ConsoleAnsiHighlight>(
- start_offset,
+ console.highlight_text_key(
+ HighlightKey::ConsoleAnsiHighlight(start_offset),
vec![range],
style,
false,
@@ -253,8 +251,8 @@ impl Console {
let range = buffer.anchor_after(MultiBufferOffset(range.start))
..buffer.anchor_before(MultiBufferOffset(range.end));
let color_fn = color_fetcher(color);
- console.highlight_background_key::<ConsoleAnsiHighlight>(
- start_offset,
+ console.highlight_background_key(
+ HighlightKey::ConsoleAnsiHighlight(start_offset),
&[range],
move |_, theme| color_fn(theme),
cx,
@@ -6,8 +6,8 @@ use std::{
use collections::HashMap;
use dap::StackFrameId;
use editor::{
- Anchor, Bias, DebugStackFrameLine, Editor, EditorEvent, ExcerptId, ExcerptRange, MultiBuffer,
- RowHighlightOptions, SelectionEffects, ToPoint, scroll::Autoscroll,
+ Anchor, Bias, DebugStackFrameLine, Editor, EditorEvent, ExcerptId, ExcerptRange, HighlightKey,
+ MultiBuffer, RowHighlightOptions, SelectionEffects, ToPoint, scroll::Autoscroll,
};
use gpui::{
App, AppContext, Entity, EventEmitter, Focusable, IntoElement, Render, SharedString,
@@ -150,7 +150,7 @@ impl StackTraceView {
fn update_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.refresh_task.take();
self.editor.update(cx, |editor, cx| {
- editor.clear_highlights::<DebugStackFrameLine>(cx)
+ editor.clear_highlights(HighlightKey::DebugStackFrameLine, cx)
});
let stack_frames = self
@@ -431,9 +431,17 @@ fn template_and_validate_json_snippets(book: &mut Book, errors: &mut HashSet<Pre
&snippet_json_fixed,
)?;
}
- label => {
- anyhow::bail!("Unexpected JSON code block tag: {}", label)
+ "semantic_token_rules" => {
+ if !snippet_json_fixed.starts_with('[') || !snippet_json_fixed.ends_with(']') {
+ snippet_json_fixed.insert(0, '[');
+ snippet_json_fixed.push_str("\n]");
+ }
+
+ settings::parse_json_with_comments::<settings::SemanticTokenRules>(
+ &snippet_json_fixed,
+ )?;
}
+ label => anyhow::bail!("Unexpected JSON code block tag: {label}"),
};
Ok(())
});
@@ -825,6 +825,8 @@ actions!(
ToggleIndentGuides,
/// Toggles inlay hints display.
ToggleInlayHints,
+ /// Toggles semantic highlights display.
+ ToggleSemanticHighlights,
/// Toggles inline values display.
ToggleInlineValues,
/// Toggles inline diagnostics display.
@@ -4,7 +4,7 @@
use std::ops::Range;
-use crate::Editor;
+use crate::{Editor, HighlightKey};
use collections::HashMap;
use gpui::{Context, HighlightStyle};
use itertools::Itertools;
@@ -12,8 +12,6 @@ use language::language_settings;
use multi_buffer::{Anchor, ExcerptId};
use ui::{ActiveTheme, utils::ensure_minimum_contrast};
-struct ColorizedBracketsHighlight;
-
impl Editor {
pub(crate) fn colorize_brackets(&mut self, invalidate: bool, cx: &mut Context<Editor>) {
if !self.mode.is_full() {
@@ -135,7 +133,7 @@ impl Editor {
);
if invalidate {
- self.clear_highlights::<ColorizedBracketsHighlight>(cx);
+ self.clear_highlights_with(|key| matches!(key, HighlightKey::ColorizeBracket(_)), cx);
}
let editor_background = cx.theme().colors().editor_background;
@@ -147,8 +145,8 @@ impl Editor {
..HighlightStyle::default()
};
- self.highlight_text_key::<ColorizedBracketsHighlight>(
- accent_number,
+ self.highlight_text_key(
+ HighlightKey::ColorizeBracket(accent_number),
bracket_highlights,
style,
true,
@@ -1049,7 +1047,7 @@ mod foo Β«1{
let actual_ranges = cx.update_editor(|editor, window, cx| {
editor
.snapshot(window, cx)
- .all_text_highlight_ranges::<ColorizedBracketsHighlight>()
+ .all_text_highlight_ranges(|key| matches!(key, HighlightKey::ColorizeBracket(_)))
});
let mut highlighted_brackets = HashMap::default();
@@ -1077,7 +1075,7 @@ mod foo Β«1{
let ranges_after_scrolling = cx.update_editor(|editor, window, cx| {
editor
.snapshot(window, cx)
- .all_text_highlight_ranges::<ColorizedBracketsHighlight>()
+ .all_text_highlight_ranges(|key| matches!(key, HighlightKey::ColorizeBracket(_)))
});
let new_last_bracket = ranges_after_scrolling
.iter()
@@ -1105,7 +1103,9 @@ mod foo Β«1{
let colored_brackets = cx.update_editor(|editor, window, cx| {
editor
.snapshot(window, cx)
- .all_text_highlight_ranges::<ColorizedBracketsHighlight>()
+ .all_text_highlight_ranges(|key| {
+ matches!(key, HighlightKey::ColorizeBracket(_))
+ })
});
for (color, range) in colored_brackets.clone() {
assert!(
@@ -1431,7 +1431,8 @@ mod foo Β«1{
offset
}
- let actual_ranges = snapshot.all_text_highlight_ranges::<ColorizedBracketsHighlight>();
+ let actual_ranges = snapshot
+ .all_text_highlight_ranges(|key| matches!(key, HighlightKey::ColorizeBracket(_)));
let editor_text = snapshot.text();
let mut next_index = 1;
@@ -92,18 +92,19 @@ pub use inlay_map::{InlayOffset, InlayPoint};
pub use invisibles::{is_invisible, replacement};
pub use wrap_map::{WrapPoint, WrapRow, WrapSnapshot};
-use collections::{HashMap, HashSet};
+use collections::{HashMap, HashSet, IndexSet};
use gpui::{
App, Context, Entity, EntityId, Font, HighlightStyle, LineLayout, Pixels, UnderlineStyle,
WeakEntity,
};
use language::{Point, Subscription as BufferSubscription, language_settings::language_settings};
use multi_buffer::{
- Anchor, AnchorRangeExt, ExcerptId, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16,
- MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, RowInfo, ToOffset, ToPoint,
+ Anchor, AnchorRangeExt, DiffbaselessAnchor, ExcerptId, MultiBuffer, MultiBufferOffset,
+ MultiBufferOffsetUtf16, MultiBufferPoint, MultiBufferRow, MultiBufferSnapshot, RowInfo,
+ ToOffset, ToPoint,
};
-use project::InlayId;
use project::project_settings::DiagnosticSeverity;
+use project::{InlayId, lsp_store::TokenType};
use serde::Deserialize;
use sum_tree::{Bias, TreeMap};
use text::{BufferId, LineIndent, Patch};
@@ -117,7 +118,7 @@ use std::{
fmt::Debug,
iter,
num::NonZeroU32,
- ops::{Add, Bound, Range, Sub},
+ ops::{self, Add, Bound, Range, Sub},
sync::Arc,
};
@@ -136,10 +137,37 @@ pub enum FoldStatus {
Foldable,
}
+/// Keys for tagging text highlights.
+///
+/// Note the order is important as it determines the priority of the highlights, lower means higher priority
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum HighlightKey {
- Type(TypeId),
- TypePlus(TypeId, usize),
+ // Note we want semantic tokens > colorized brackets
+ // to allow language server highlights to work over brackets.
+ ColorizeBracket(usize),
+ SemanticToken,
+ // below is sorted lexicographically, as there is no relevant ordering for these aside from coming after the above
+ BufferSearchHighlights,
+ ConsoleAnsiHighlight(usize),
+ DebugStackFrameLine,
+ DocumentHighlightRead,
+ DocumentHighlightWrite,
+ EditPredictionHighlight,
+ Editor,
+ HighlightOnYank,
+ HighlightsTreeView(usize),
+ HoverState,
+ HoveredLinkState,
+ InlineAssist,
+ InputComposition,
+ MatchingBracket,
+ PendingInput,
+ ProjectSearchView,
+ Rename,
+ SearchWithinRange,
+ SelectedTextHighlight,
+ SyntaxTreeView(usize),
+ VimExchange,
}
pub trait ToDisplayPoint {
@@ -147,7 +175,9 @@ pub trait ToDisplayPoint {
}
type TextHighlights = TreeMap<HighlightKey, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>;
-type InlayHighlights = TreeMap<TypeId, TreeMap<InlayId, (HighlightStyle, InlayHighlight)>>;
+type SemanticTokensHighlights =
+ TreeMap<BufferId, (Arc<[SemanticTokenHighlight]>, Arc<HighlightStyleInterner>)>;
+type InlayHighlights = TreeMap<HighlightKey, TreeMap<InlayId, (HighlightStyle, InlayHighlight)>>;
#[derive(Debug)]
pub struct CompanionExcerptPatch {
@@ -187,6 +217,8 @@ pub struct DisplayMap {
text_highlights: TextHighlights,
/// Regions of inlays that should be highlighted.
inlay_highlights: InlayHighlights,
+ /// The semantic tokens from the language server.
+ pub semantic_token_highlights: SemanticTokensHighlights,
/// A container for explicitly foldable ranges, which supersede indentation based fold range suggestions.
crease_map: CreaseMap,
pub(crate) fold_placeholder: FoldPlaceholder,
@@ -316,6 +348,38 @@ impl Companion {
}
}
+#[derive(Default, Debug)]
+pub struct HighlightStyleInterner {
+ styles: IndexSet<HighlightStyle>,
+}
+
+impl HighlightStyleInterner {
+ pub(crate) fn intern(&mut self, style: HighlightStyle) -> HighlightStyleId {
+ HighlightStyleId(self.styles.insert_full(style).0 as u32)
+ }
+}
+
+impl ops::Index<HighlightStyleId> for HighlightStyleInterner {
+ type Output = HighlightStyle;
+
+ fn index(&self, index: HighlightStyleId) -> &Self::Output {
+ &self.styles[index.0 as usize]
+ }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct HighlightStyleId(u32);
+
+/// A `SemanticToken`, but positioned to an offset in a buffer, and stylized.
+#[derive(Debug, Clone)]
+pub struct SemanticTokenHighlight {
+ pub range: Range<DiffbaselessAnchor>,
+ pub style: HighlightStyleId,
+ pub token_type: TokenType,
+ pub token_modifiers: u32,
+ pub server_id: lsp::LanguageServerId,
+}
+
impl DisplayMap {
pub fn new(
buffer: Entity<MultiBuffer>,
@@ -355,6 +419,7 @@ impl DisplayMap {
diagnostics_max_severity,
text_highlights: Default::default(),
inlay_highlights: Default::default(),
+ semantic_token_highlights: TreeMap::default(),
clip_at_line_ends: false,
masked: false,
companion: None,
@@ -518,6 +583,7 @@ impl DisplayMap {
crease_snapshot: self.crease_map.snapshot(),
text_highlights: self.text_highlights.clone(),
inlay_highlights: self.inlay_highlights.clone(),
+ semantic_token_highlights: self.semantic_token_highlights.clone(),
clip_at_line_ends: self.clip_at_line_ends,
masked: self.masked,
fold_placeholder: self.fold_placeholder.clone(),
@@ -540,6 +606,7 @@ impl DisplayMap {
crease_snapshot: self.crease_map.snapshot(),
text_highlights: self.text_highlights.clone(),
inlay_highlights: self.inlay_highlights.clone(),
+ semantic_token_highlights: self.semantic_token_highlights.clone(),
clip_at_line_ends: self.clip_at_line_ends,
masked: self.masked,
fold_placeholder: self.fold_placeholder.clone(),
@@ -1305,17 +1372,17 @@ impl DisplayMap {
#[instrument(skip_all)]
pub(crate) fn highlight_inlays(
&mut self,
- type_id: TypeId,
+ key: HighlightKey,
highlights: Vec<InlayHighlight>,
style: HighlightStyle,
) {
for highlight in highlights {
- let update = self.inlay_highlights.update(&type_id, |highlights| {
+ let update = self.inlay_highlights.update(&key, |highlights| {
highlights.insert(highlight.inlay, (style, highlight.clone()))
});
if update.is_none() {
self.inlay_highlights.insert(
- type_id,
+ key,
TreeMap::from_ordered_entries([(highlight.inlay, (style, highlight))]),
);
}
@@ -1323,34 +1390,46 @@ impl DisplayMap {
}
#[instrument(skip_all)]
- pub fn text_highlights(&self, type_id: TypeId) -> Option<(HighlightStyle, &[Range<Anchor>])> {
- let highlights = self.text_highlights.get(&HighlightKey::Type(type_id))?;
+ pub fn text_highlights(&self, key: HighlightKey) -> Option<(HighlightStyle, &[Range<Anchor>])> {
+ let highlights = self.text_highlights.get(&key)?;
Some((highlights.0, &highlights.1))
}
- #[cfg(feature = "test-support")]
pub fn all_text_highlights(
&self,
- ) -> impl Iterator<Item = &Arc<(HighlightStyle, Vec<Range<Anchor>>)>> {
- self.text_highlights.values()
+ ) -> impl Iterator<Item = (&HighlightKey, &Arc<(HighlightStyle, Vec<Range<Anchor>>)>)> {
+ self.text_highlights.iter()
}
- #[instrument(skip_all)]
- pub fn clear_highlights(&mut self, type_id: TypeId) -> bool {
- let mut cleared = self
- .text_highlights
- .remove(&HighlightKey::Type(type_id))
- .is_some();
- self.text_highlights.retain(|key, _| {
- let retain = if let HighlightKey::TypePlus(key_type_id, _) = key {
- key_type_id != &type_id
- } else {
- true
- };
- cleared |= !retain;
- retain
+ pub fn all_semantic_token_highlights(
+ &self,
+ ) -> impl Iterator<
+ Item = (
+ &BufferId,
+ &(Arc<[SemanticTokenHighlight]>, Arc<HighlightStyleInterner>),
+ ),
+ > {
+ self.semantic_token_highlights.iter()
+ }
+
+ pub fn clear_highlights(&mut self, key: HighlightKey) -> bool {
+ let mut cleared = self.text_highlights.remove(&key).is_some();
+ cleared |= self.inlay_highlights.remove(&key).is_some();
+ cleared
+ }
+
+ pub fn clear_highlights_with(&mut self, mut f: impl FnMut(&HighlightKey) -> bool) -> bool {
+ let mut cleared = false;
+ self.text_highlights.retain(|k, _| {
+ let b = !f(k);
+ cleared |= b;
+ b
+ });
+ self.inlay_highlights.retain(|k, _| {
+ let b = !f(k);
+ cleared |= b;
+ b
});
- cleared |= self.inlay_highlights.remove(&type_id).is_some();
cleared
}
@@ -1538,12 +1617,17 @@ impl DisplayMap {
pub fn is_rewrapping(&self, cx: &gpui::App) -> bool {
self.wrap_map.read(cx).is_rewrapping()
}
+
+ pub fn invalidate_semantic_highlights(&mut self, buffer_id: BufferId) {
+ self.semantic_token_highlights.remove(&buffer_id);
+ }
}
#[derive(Debug, Default)]
pub(crate) struct Highlights<'a> {
pub text_highlights: Option<&'a TextHighlights>,
pub inlay_highlights: Option<&'a InlayHighlights>,
+ pub semantic_token_highlights: Option<&'a SemanticTokensHighlights>,
pub styles: HighlightStyles,
}
@@ -1680,6 +1764,7 @@ pub struct DisplaySnapshot {
block_snapshot: BlockSnapshot,
text_highlights: TextHighlights,
inlay_highlights: InlayHighlights,
+ semantic_token_highlights: SemanticTokensHighlights,
clip_at_line_ends: bool,
masked: bool,
diagnostics_max_severity: DiagnosticSeverity,
@@ -1730,6 +1815,23 @@ impl DisplaySnapshot {
self.buffer_snapshot().len() == MultiBufferOffset(0)
}
+ /// Returns whether tree-sitter syntax highlighting should be used.
+ /// Returns `false` if any buffer with semantic token highlights has the "full" mode setting,
+ /// meaning LSP semantic tokens should replace tree-sitter highlighting.
+ pub fn use_tree_sitter_for_syntax(&self, position: DisplayRow, cx: &App) -> bool {
+ let position = DisplayPoint::new(position, 0);
+ let Some((buffer_snapshot, ..)) = self.point_to_buffer_point(position.to_point(self))
+ else {
+ return false;
+ };
+ let settings = language_settings(
+ buffer_snapshot.language().map(|l| l.name()),
+ buffer_snapshot.file(),
+ cx,
+ );
+ settings.semantic_tokens.use_tree_sitter()
+ }
+
pub fn row_infos(&self, start_row: DisplayRow) -> impl Iterator<Item = RowInfo> + '_ {
self.block_snapshot.row_infos(BlockRow(start_row.0))
}
@@ -1905,6 +2007,7 @@ impl DisplaySnapshot {
Highlights {
text_highlights: Some(&self.text_highlights),
inlay_highlights: Some(&self.inlay_highlights),
+ semantic_token_highlights: Some(&self.semantic_token_highlights),
styles: highlight_styles,
},
)
@@ -1926,7 +2029,7 @@ impl DisplaySnapshot {
},
)
.flat_map(|chunk| {
- let highlight_style = chunk
+ let syntax_highlight_style = chunk
.syntax_highlight_id
.and_then(|id| id.style(&editor_style.syntax));
@@ -1971,10 +2074,14 @@ impl DisplaySnapshot {
..Default::default()
});
- let style = [highlight_style, chunk_highlight, diagnostic_highlight]
- .into_iter()
- .flatten()
- .reduce(|acc, highlight| acc.highlight(highlight));
+ let style = [
+ syntax_highlight_style,
+ chunk_highlight,
+ diagnostic_highlight,
+ ]
+ .into_iter()
+ .flatten()
+ .reduce(|acc, highlight| acc.highlight(highlight));
HighlightedChunk {
text: chunk.text,
@@ -2327,29 +2434,24 @@ impl DisplaySnapshot {
#[cfg(any(test, feature = "test-support"))]
#[instrument(skip_all)]
- pub fn text_highlight_ranges<Tag: ?Sized + 'static>(
+ pub fn text_highlight_ranges(
&self,
+ key: HighlightKey,
) -> Option<Arc<(HighlightStyle, Vec<Range<Anchor>>)>> {
- let type_id = TypeId::of::<Tag>();
- self.text_highlights
- .get(&HighlightKey::Type(type_id))
- .cloned()
+ self.text_highlights.get(&key).cloned()
}
#[cfg(any(test, feature = "test-support"))]
#[instrument(skip_all)]
- pub fn all_text_highlight_ranges<Tag: ?Sized + 'static>(
+ pub fn all_text_highlight_ranges(
&self,
+ f: impl Fn(&HighlightKey) -> bool,
) -> Vec<(gpui::Hsla, Range<Point>)> {
use itertools::Itertools;
- let required_type_id = TypeId::of::<Tag>();
self.text_highlights
.iter()
- .filter(|(key, _)| match key {
- HighlightKey::Type(type_id) => type_id == &required_type_id,
- HighlightKey::TypePlus(type_id, _) => type_id == &required_type_id,
- })
+ .filter(|(key, _)| f(key))
.map(|(_, value)| value.clone())
.flat_map(|ranges| {
ranges
@@ -2366,11 +2468,11 @@ impl DisplaySnapshot {
#[allow(unused)]
#[cfg(any(test, feature = "test-support"))]
- pub(crate) fn inlay_highlights<Tag: ?Sized + 'static>(
+ pub(crate) fn inlay_highlights(
&self,
+ key: HighlightKey,
) -> Option<&TreeMap<InlayId, (HighlightStyle, InlayHighlight)>> {
- let type_id = TypeId::of::<Tag>();
- self.inlay_highlights.get(&type_id)
+ self.inlay_highlights.get(&key)
}
pub fn buffer_header_height(&self) -> u32 {
@@ -3371,7 +3473,7 @@ pub mod tests {
// Insert a block in the middle of a multi-line diagnostic.
map.update(cx, |map, cx| {
map.highlight_text(
- HighlightKey::Type(TypeId::of::<usize>()),
+ HighlightKey::Editor,
vec![
buffer_snapshot.anchor_before(Point::new(3, 9))
..buffer_snapshot.anchor_after(Point::new(3, 14)),
@@ -3685,8 +3787,6 @@ pub mod tests {
)
});
- enum MyType {}
-
let style = HighlightStyle {
color: Some(Hsla::blue()),
..Default::default()
@@ -3694,7 +3794,7 @@ pub mod tests {
map.update(cx, |map, cx| {
map.highlight_text(
- HighlightKey::Type(TypeId::of::<MyType>()),
+ HighlightKey::Editor,
highlighted_ranges
.into_iter()
.map(|range| MultiBufferOffset(range.start)..MultiBufferOffset(range.end))
@@ -9,7 +9,7 @@ use std::{
vec,
};
-use crate::display_map::{HighlightKey, TextHighlights};
+use crate::display_map::{HighlightKey, SemanticTokensHighlights, TextHighlights};
pub struct CustomHighlightsChunks<'a> {
buffer_chunks: MultiBufferChunks<'a>,
@@ -20,6 +20,7 @@ pub struct CustomHighlightsChunks<'a> {
highlight_endpoints: Peekable<vec::IntoIter<HighlightEndpoint>>,
active_highlights: BTreeMap<HighlightKey, HighlightStyle>,
text_highlights: Option<&'a TextHighlights>,
+ semantic_token_highlights: Option<&'a SemanticTokensHighlights>,
}
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
@@ -35,6 +36,7 @@ impl<'a> CustomHighlightsChunks<'a> {
range: Range<MultiBufferOffset>,
language_aware: bool,
text_highlights: Option<&'a TextHighlights>,
+ semantic_token_highlights: Option<&'a SemanticTokensHighlights>,
multibuffer_snapshot: &'a MultiBufferSnapshot,
) -> Self {
Self {
@@ -45,17 +47,23 @@ impl<'a> CustomHighlightsChunks<'a> {
highlight_endpoints: create_highlight_endpoints(
&range,
text_highlights,
+ semantic_token_highlights,
multibuffer_snapshot,
),
active_highlights: Default::default(),
multibuffer_snapshot,
+ semantic_token_highlights,
}
}
#[ztracing::instrument(skip_all)]
pub fn seek(&mut self, new_range: Range<MultiBufferOffset>) {
- self.highlight_endpoints =
- create_highlight_endpoints(&new_range, self.text_highlights, self.multibuffer_snapshot);
+ self.highlight_endpoints = create_highlight_endpoints(
+ &new_range,
+ self.text_highlights,
+ self.semantic_token_highlights,
+ self.multibuffer_snapshot,
+ );
self.offset = new_range.start;
self.buffer_chunks.seek(new_range);
self.buffer_chunk.take();
@@ -66,6 +74,7 @@ impl<'a> CustomHighlightsChunks<'a> {
fn create_highlight_endpoints(
range: &Range<MultiBufferOffset>,
text_highlights: Option<&TextHighlights>,
+ semantic_token_highlights: Option<&SemanticTokensHighlights>,
buffer: &MultiBufferSnapshot,
) -> iter::Peekable<vec::IntoIter<HighlightEndpoint>> {
let mut highlight_endpoints = Vec::new();
@@ -105,8 +114,53 @@ fn create_highlight_endpoints(
});
}
}
- highlight_endpoints.sort();
}
+ if let Some(semantic_token_highlights) = semantic_token_highlights {
+ let Ok(start) = buffer.anchor_after(range.start).try_into() else {
+ return highlight_endpoints.into_iter().peekable();
+ };
+ let Ok(end) = buffer.anchor_after(range.end).try_into() else {
+ return highlight_endpoints.into_iter().peekable();
+ };
+ for buffer_id in buffer.buffer_ids_for_range(range.clone()) {
+ let Some((semantic_token_highlights, interner)) =
+ semantic_token_highlights.get(&buffer_id)
+ else {
+ continue;
+ };
+ let start_ix = semantic_token_highlights
+ .binary_search_by(|probe| {
+ probe
+ .range
+ .end
+ .cmp(&start, buffer)
+ .then(cmp::Ordering::Less)
+ })
+ .unwrap_or_else(|i| i);
+ for token in &semantic_token_highlights[start_ix..] {
+ if token.range.start.cmp(&end, buffer).is_ge() {
+ break;
+ }
+
+ let start = token.range.start.to_offset(buffer);
+ let end = token.range.end.to_offset(buffer);
+ if start == end {
+ continue;
+ }
+ highlight_endpoints.push(HighlightEndpoint {
+ offset: start,
+ tag: HighlightKey::SemanticToken,
+ style: Some(interner[token.style]),
+ });
+ highlight_endpoints.push(HighlightEndpoint {
+ offset: end,
+ tag: HighlightKey::SemanticToken,
+ style: None,
+ });
+ }
+ }
+ }
+ highlight_endpoints.sort();
highlight_endpoints.into_iter().peekable()
}
@@ -177,12 +231,13 @@ impl Ord for HighlightEndpoint {
self.offset
.cmp(&other.offset)
.then_with(|| self.style.is_some().cmp(&other.style.is_some()))
+ .then_with(|| self.tag.cmp(&other.tag))
}
}
#[cfg(test)]
mod tests {
- use std::{any::TypeId, sync::Arc};
+ use std::sync::Arc;
use super::*;
use crate::MultiBuffer;
@@ -248,8 +303,7 @@ mod tests {
ranges.push(start_anchor..end_anchor);
}
- let type_id = TypeId::of::<()>(); // Simple type ID for testing
- highlights.insert(HighlightKey::Type(type_id), Arc::new((style, ranges)));
+ highlights.insert(HighlightKey::Editor, Arc::new((style, ranges)));
}
// Get all chunks and verify their bitmaps
@@ -257,6 +311,7 @@ mod tests {
MultiBufferOffset(0)..buffer_snapshot.len(),
false,
None,
+ None,
&buffer_snapshot,
);
@@ -1158,6 +1158,7 @@ impl InlaySnapshot {
buffer_range,
language_aware,
highlights.text_highlights,
+ highlights.semantic_token_highlights,
&self.buffer,
);
@@ -1283,7 +1284,7 @@ mod tests {
use project::{InlayHint, InlayHintLabel, ResolveState};
use rand::prelude::*;
use settings::SettingsStore;
- use std::{any::TypeId, cmp::Reverse, env, sync::Arc};
+ use std::{cmp::Reverse, env, sync::Arc};
use sum_tree::TreeMap;
use text::{Patch, Rope};
use util::RandomCharIter;
@@ -1852,7 +1853,7 @@ mod tests {
text_highlight_ranges.sort_by_key(|range| (range.start, Reverse(range.end)));
log::info!("highlighting text ranges {text_highlight_ranges:?}");
text_highlights.insert(
- HighlightKey::Type(TypeId::of::<()>()),
+ HighlightKey::ColorizeBracket(0),
Arc::new((
HighlightStyle::default(),
text_highlight_ranges
@@ -1906,7 +1907,7 @@ mod tests {
.map(|highlight| (highlight.inlay, (HighlightStyle::default(), highlight))),
);
log::info!("highlighting inlay ranges {new_highlights:?}");
- inlay_highlights.insert(TypeId::of::<()>(), new_highlights);
+ inlay_highlights.insert(HighlightKey::Editor, new_highlights);
}
for _ in 0..5 {
@@ -2177,7 +2178,7 @@ mod tests {
inlay_id: InlayId,
highlight_range: Range<usize>,
position: Anchor,
- ) -> TreeMap<TypeId, TreeMap<InlayId, (HighlightStyle, InlayHighlight)>> {
+ ) -> TreeMap<HighlightKey, TreeMap<InlayId, (HighlightStyle, InlayHighlight)>> {
let mut inlay_highlights = TreeMap::default();
let mut type_highlights = TreeMap::default();
type_highlights.insert(
@@ -2191,7 +2192,7 @@ mod tests {
},
),
);
- inlay_highlights.insert(TypeId::of::<()>(), type_highlights);
+ inlay_highlights.insert(HighlightKey::Editor, type_highlights);
inlay_highlights
}
@@ -2227,6 +2228,7 @@ mod tests {
let highlights = crate::display_map::Highlights {
text_highlights: None,
inlay_highlights: Some(&inlay_highlights),
+ semantic_token_highlights: None,
styles: crate::display_map::HighlightStyles::default(),
};
@@ -2342,6 +2344,7 @@ mod tests {
let highlights = crate::display_map::Highlights {
text_highlights: None,
inlay_highlights: Some(&inlay_highlights),
+ semantic_token_highlights: None,
styles: crate::display_map::HighlightStyles::default(),
};
@@ -36,6 +36,7 @@ mod persistence;
mod rust_analyzer_ext;
pub mod scroll;
mod selections_collection;
+pub mod semantic_tokens;
mod split;
pub mod split_editor_view;
pub mod tasks;
@@ -51,7 +52,10 @@ mod signature_help;
pub mod test;
pub(crate) use actions::*;
-pub use display_map::{ChunkRenderer, ChunkRendererContext, DisplayPoint, FoldPlaceholder};
+pub use display_map::{
+ ChunkRenderer, ChunkRendererContext, DisplayPoint, FoldPlaceholder, HighlightKey,
+ SemanticTokenHighlight,
+};
pub use edit_prediction_types::Direction;
pub use editor_settings::{
CompletionDetailAlignment, CurrentLineHighlight, DocumentColorsRenderMode, EditorSettings,
@@ -68,9 +72,9 @@ pub use items::MAX_TAB_TITLE_LEN;
pub use lsp::CompletionContext;
pub use lsp_ext::lsp_tasks;
pub use multi_buffer::{
- Anchor, AnchorRangeExt, BufferOffset, ExcerptId, ExcerptRange, MBTextSummary, MultiBuffer,
- MultiBufferOffset, MultiBufferOffsetUtf16, MultiBufferSnapshot, PathKey, RowInfo, ToOffset,
- ToPoint,
+ Anchor, AnchorRangeExt, BufferOffset, DiffbaselessAnchor, DiffbaselessAnchorRangeExt,
+ ExcerptId, ExcerptRange, MBTextSummary, MultiBuffer, MultiBufferOffset, MultiBufferOffsetUtf16,
+ MultiBufferSnapshot, PathKey, RowInfo, ToOffset, ToPoint,
};
pub use split::{SplitDiffFeatureFlag, SplittableEditor, ToggleLockedCursors, ToggleSplitDiff};
pub use split_editor_view::SplitEditorView;
@@ -160,8 +164,8 @@ use project::{
},
git_store::GitStoreEvent,
lsp_store::{
- CacheInlayHints, CompletionDocumentation, FormatTrigger, LspFormatTarget,
- OpenLspBufferHandle,
+ BufferSemanticTokens, CacheInlayHints, CompletionDocumentation, FormatTrigger,
+ LspFormatTarget, OpenLspBufferHandle, RefreshForServer,
},
project_settings::{DiagnosticSeverity, GoToDiagnosticSeverityFilter, ProjectSettings},
};
@@ -172,8 +176,8 @@ use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, SharedScrol
use selections_collection::{MutableSelectionsCollection, SelectionsCollection};
use serde::{Deserialize, Serialize};
use settings::{
- GitGutterSetting, RelativeLineNumbers, Settings, SettingsLocation, SettingsStore,
- update_settings_file,
+ GitGutterSetting, RelativeLineNumbers, SemanticTokenRules, Settings, SettingsLocation,
+ SettingsStore, update_settings_file,
};
use smallvec::{SmallVec, smallvec};
use snippet::Snippet;
@@ -278,11 +282,6 @@ impl ReportEditorEvent {
pub enum ActiveDebugLine {}
pub enum DebugStackFrameLine {}
-enum DocumentHighlightRead {}
-enum DocumentHighlightWrite {}
-enum InputComposition {}
-pub enum PendingInput {}
-enum SelectedTextHighlight {}
pub enum ConflictsOuter {}
pub enum ConflictsOurs {}
@@ -669,8 +668,6 @@ enum EditPredictionSettings {
},
}
-enum EditPredictionHighlight {}
-
#[derive(Debug, Clone)]
struct InlineDiagnostic {
message: SharedString,
@@ -1339,6 +1336,10 @@ pub struct Editor {
applicable_language_settings: HashMap<Option<LanguageName>, LanguageSettings>,
accent_data: Option<AccentData>,
fetched_tree_sitter_chunks: HashMap<ExcerptId, HashSet<Range<BufferRow>>>,
+ semantic_token_rules: SemanticTokenRules,
+ semantic_tokens_enabled: bool,
+ update_semantic_tokens_task: Task<()>,
+ semantic_tokens_fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
#[derive(Debug, PartialEq)]
@@ -1392,6 +1393,7 @@ pub struct EditorSnapshot {
ongoing_scroll: OngoingScroll,
current_line_highlight: CurrentLineHighlight,
gutter_hovered: bool,
+ semantic_tokens_enabled: bool,
}
#[derive(Default, Debug, Clone, Copy)]
@@ -2127,12 +2129,27 @@ impl Editor {
cx,
);
}
- project::Event::LanguageServerRemoved(..) => {
+ project::Event::RefreshSemanticTokens {
+ server_id,
+ request_id,
+ } => {
+ editor.update_semantic_tokens(
+ None,
+ Some(RefreshForServer {
+ server_id: *server_id,
+ request_id: *request_id,
+ }),
+ cx,
+ );
+ }
+ project::Event::LanguageServerRemoved(_server_id) => {
if editor.tasks_update_task.is_none() {
editor.tasks_update_task = Some(editor.refresh_runnables(window, cx));
}
editor.registered_buffers.clear();
editor.register_visible_buffers(cx);
+ editor.update_semantic_tokens(None, None, cx);
+ editor.refresh_inlay_hints(InlayHintRefreshReason::ServerRemoved, cx);
}
project::Event::LanguageServerAdded(..) => {
if editor.tasks_update_task.is_none() {
@@ -2551,8 +2568,15 @@ impl Editor {
on_local_selections_changed: None,
suppress_selection_callback: false,
applicable_language_settings: HashMap::default(),
+ semantic_token_rules: ProjectSettings::get_global(cx)
+ .global_lsp_settings
+ .semantic_token_rules
+ .clone(),
accent_data: None,
fetched_tree_sitter_chunks: HashMap::default(),
+ semantic_tokens_enabled: full_mode,
+ update_semantic_tokens_task: Task::ready(()),
+ semantic_tokens_fetched_for_buffers: HashMap::default(),
number_deleted_lines: false,
};
@@ -3094,6 +3118,7 @@ impl Editor {
show_line_numbers: self.show_line_numbers,
number_deleted_lines: self.number_deleted_lines,
show_git_diff_gutter: self.show_git_diff_gutter,
+ semantic_tokens_enabled: self.semantic_tokens_enabled,
show_code_actions: self.show_code_actions,
show_runnables: self.show_runnables,
show_breakpoints: self.show_breakpoints,
@@ -6926,7 +6951,8 @@ impl Editor {
cx.new(|cx| Editor::for_multibuffer(excerpt_buffer, Some(project), window, cx));
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
editor.update(cx, |editor, cx| {
- editor.highlight_background::<Self>(
+ editor.highlight_background(
+ HighlightKey::Editor,
&ranges_to_highlight,
|_, theme| theme.colors().editor_highlighted_line_background,
cx,
@@ -7267,8 +7293,8 @@ impl Editor {
let (end_word_range, _) = snapshot.surrounding_word(tail_buffer_position, None);
if start_word_range != end_word_range {
self.document_highlights_task.take();
- self.clear_background_highlights::<DocumentHighlightRead>(cx);
- self.clear_background_highlights::<DocumentHighlightWrite>(cx);
+ self.clear_background_highlights(HighlightKey::DocumentHighlightRead, cx);
+ self.clear_background_highlights(HighlightKey::DocumentHighlightWrite, cx);
return None;
}
@@ -7328,12 +7354,14 @@ impl Editor {
}
}
- this.highlight_background::<DocumentHighlightRead>(
+ this.highlight_background(
+ HighlightKey::DocumentHighlightRead,
&read_ranges,
|_, theme| theme.colors().editor_document_highlight_read_background,
cx,
);
- this.highlight_background::<DocumentHighlightWrite>(
+ this.highlight_background(
+ HighlightKey::DocumentHighlightWrite,
&write_ranges,
|_, theme| theme.colors().editor_document_highlight_write_background,
cx,
@@ -7443,13 +7471,14 @@ impl Editor {
editor
.update_in(cx, |editor, _, cx| {
if use_debounce {
- editor.clear_background_highlights::<SelectedTextHighlight>(cx);
+ editor.clear_background_highlights(HighlightKey::SelectedTextHighlight, cx);
editor.debounced_selection_highlight_complete = true;
} else if editor.debounced_selection_highlight_complete {
return;
}
if !match_ranges.is_empty() {
- editor.highlight_background::<SelectedTextHighlight>(
+ editor.highlight_background(
+ HighlightKey::SelectedTextHighlight,
&match_ranges,
|_, theme| theme.colors().editor_document_highlight_bracket_background,
cx,
@@ -7541,7 +7570,7 @@ impl Editor {
let Some((query_text, query_range)) =
self.prepare_highlight_query_from_selection(window, cx)
else {
- self.clear_background_highlights::<SelectedTextHighlight>(cx);
+ self.clear_background_highlights(HighlightKey::SelectedTextHighlight, cx);
self.quick_selection_highlight_task.take();
self.debounced_selection_highlight_task.take();
self.debounced_selection_highlight_complete = false;
@@ -8134,7 +8163,7 @@ impl Editor {
};
self.splice_inlays(&active_edit_prediction.inlay_ids, Default::default(), cx);
- self.clear_highlights::<EditPredictionHighlight>(cx);
+ self.clear_highlights(HighlightKey::EditPredictionHighlight, cx);
self.stale_edit_prediction_in_menu = Some(active_edit_prediction);
true
}
@@ -8519,7 +8548,8 @@ impl Editor {
self.splice_inlays(&[], inlays, cx);
} else {
let background_color = cx.theme().status().deleted_background;
- self.highlight_text::<EditPredictionHighlight>(
+ self.highlight_text(
+ HighlightKey::EditPredictionHighlight,
edits.iter().map(|(range, _)| range.clone()).collect(),
HighlightStyle {
background_color: Some(background_color),
@@ -17570,14 +17600,14 @@ impl Editor {
if let Some((_, read_highlights)) = self
.background_highlights
- .get(&HighlightKey::Type(TypeId::of::<DocumentHighlightRead>()))
+ .get(&HighlightKey::DocumentHighlightRead)
{
all_highlights.extend(read_highlights.iter());
}
if let Some((_, write_highlights)) = self
.background_highlights
- .get(&HighlightKey::Type(TypeId::of::<DocumentHighlightWrite>()))
+ .get(&HighlightKey::DocumentHighlightWrite)
{
all_highlights.extend(write_highlights.iter());
}
@@ -18518,7 +18548,8 @@ impl Editor {
},
);
}
- editor.highlight_background::<Self>(
+ editor.highlight_background(
+ HighlightKey::Editor,
&ranges,
|_, theme| theme.colors().editor_highlighted_line_background,
cx,
@@ -18678,9 +18709,9 @@ impl Editor {
.detach();
let write_highlights =
- this.clear_background_highlights::<DocumentHighlightWrite>(cx);
+ this.clear_background_highlights(HighlightKey::DocumentHighlightWrite, cx);
let read_highlights =
- this.clear_background_highlights::<DocumentHighlightRead>(cx);
+ this.clear_background_highlights(HighlightKey::DocumentHighlightRead, cx);
let ranges = write_highlights
.iter()
.flat_map(|(_, ranges)| ranges.iter())
@@ -18688,7 +18719,8 @@ impl Editor {
.cloned()
.collect();
- this.highlight_text::<Rename>(
+ this.highlight_text(
+ HighlightKey::Rename,
ranges,
HighlightStyle {
fade_out: Some(0.6),
@@ -18819,7 +18851,7 @@ impl Editor {
Some(Autoscroll::fit()),
cx,
);
- self.clear_highlights::<Rename>(cx);
+ self.clear_highlights(HighlightKey::Rename, cx);
self.show_local_selections = true;
if moving_cursor {
@@ -19082,7 +19114,6 @@ impl Editor {
);
});
});
- self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
}
}
@@ -19393,7 +19424,13 @@ impl Editor {
_window: &Window,
cx: &mut Context<Self>,
) -> Option<()> {
- if self.ignore_lsp_data() || !self.diagnostics_enabled() {
+ // `ActiveDiagnostic::All` is a special mode where editor's diagnostics are managed by the external view,
+ // skip any LSP updates for it.
+
+ if self.active_diagnostics == ActiveDiagnostic::All
+ || !self.mode().is_full()
+ || !self.diagnostics_enabled()
+ {
return None;
}
let pull_diagnostics_settings = ProjectSettings::get_global(cx)
@@ -23191,7 +23228,8 @@ impl Editor {
}
pub fn set_search_within_ranges(&mut self, ranges: &[Range<Anchor>], cx: &mut Context<Self>) {
- self.highlight_background::<SearchWithinRange>(
+ self.highlight_background(
+ HighlightKey::SearchWithinRange,
ranges,
|_, colors| colors.colors().editor_document_highlight_read_background,
cx,
@@ -23203,60 +23241,41 @@ impl Editor {
}
pub fn clear_search_within_ranges(&mut self, cx: &mut Context<Self>) {
- self.clear_background_highlights::<SearchWithinRange>(cx);
+ self.clear_background_highlights(HighlightKey::SearchWithinRange, cx);
}
- pub fn highlight_background<T: 'static>(
+ pub fn highlight_background(
&mut self,
+ key: HighlightKey,
ranges: &[Range<Anchor>],
color_fetcher: impl Fn(&usize, &Theme) -> Hsla + Send + Sync + 'static,
cx: &mut Context<Self>,
) {
- self.background_highlights.insert(
- HighlightKey::Type(TypeId::of::<T>()),
- (Arc::new(color_fetcher), Arc::from(ranges)),
- );
+ self.background_highlights
+ .insert(key, (Arc::new(color_fetcher), Arc::from(ranges)));
self.scrollbar_marker_state.dirty = true;
cx.notify();
}
- pub fn highlight_background_key<T: 'static>(
+ pub fn highlight_background_key(
&mut self,
- key: usize,
+ key: HighlightKey,
ranges: &[Range<Anchor>],
color_fetcher: impl Fn(&usize, &Theme) -> Hsla + Send + Sync + 'static,
cx: &mut Context<Self>,
) {
- self.background_highlights.insert(
- HighlightKey::TypePlus(TypeId::of::<T>(), key),
- (Arc::new(color_fetcher), Arc::from(ranges)),
- );
+ self.background_highlights
+ .insert(key, (Arc::new(color_fetcher), Arc::from(ranges)));
self.scrollbar_marker_state.dirty = true;
cx.notify();
}
- pub fn clear_background_highlights<T: 'static>(
+ pub fn clear_background_highlights(
&mut self,
+ key: HighlightKey,
cx: &mut Context<Self>,
) -> Option<BackgroundHighlight> {
- let text_highlights = self
- .background_highlights
- .remove(&HighlightKey::Type(TypeId::of::<T>()))?;
- if !text_highlights.1.is_empty() {
- self.scrollbar_marker_state.dirty = true;
- cx.notify();
- }
- Some(text_highlights)
- }
-
- pub fn clear_background_highlights_key<T: 'static>(
- &mut self,
- key: usize,
- cx: &mut Context<Self>,
- ) -> Option<BackgroundHighlight> {
- let text_highlights = self
- .background_highlights
- .remove(&HighlightKey::TypePlus(TypeId::of::<T>(), key))?;
+ let text_highlights = self.background_highlights.remove(&key)?;
if !text_highlights.1.is_empty() {
self.scrollbar_marker_state.dirty = true;
cx.notify();
@@ -23352,7 +23371,7 @@ impl Editor {
self.display_map.update(cx, |display_map, _| {
display_map
.all_text_highlights()
- .map(|highlight| {
+ .map(|(_, highlight)| {
let (style, ranges) = highlight.as_ref();
(
*style,
@@ -23402,9 +23421,7 @@ impl Editor {
let highlights = self
.background_highlights
- .get(&HighlightKey::Type(TypeId::of::<
- items::BufferSearchHighlights,
- >()));
+ .get(&HighlightKey::BufferSearchHighlights);
if let Some((_color, ranges)) = highlights {
ranges
@@ -23423,11 +23440,11 @@ impl Editor {
) -> impl 'a + Iterator<Item = &'a Range<Anchor>> {
let read_highlights = self
.background_highlights
- .get(&HighlightKey::Type(TypeId::of::<DocumentHighlightRead>()))
+ .get(&HighlightKey::DocumentHighlightRead)
.map(|h| &h.1);
let write_highlights = self
.background_highlights
- .get(&HighlightKey::Type(TypeId::of::<DocumentHighlightWrite>()))
+ .get(&HighlightKey::DocumentHighlightWrite)
.map(|h| &h.1);
let left_position = position.bias_left(buffer);
let right_position = position.bias_right(buffer);
@@ -23452,9 +23469,9 @@ impl Editor {
})
}
- pub fn has_background_highlights<T: 'static>(&self) -> bool {
+ pub fn has_background_highlights(&self, key: HighlightKey) -> bool {
self.background_highlights
- .get(&HighlightKey::Type(TypeId::of::<T>()))
+ .get(&key)
.is_some_and(|(_, highlights)| !highlights.is_empty())
}
@@ -23568,55 +23585,58 @@ impl Editor {
.collect()
}
- pub fn highlight_text_key<T: 'static>(
+ pub fn highlight_text_key(
&mut self,
- key: usize,
+ key: HighlightKey,
ranges: Vec<Range<Anchor>>,
style: HighlightStyle,
merge: bool,
cx: &mut Context<Self>,
) {
self.display_map.update(cx, |map, cx| {
- map.highlight_text(
- HighlightKey::TypePlus(TypeId::of::<T>(), key),
- ranges,
- style,
- merge,
- cx,
- );
+ map.highlight_text(key, ranges, style, merge, cx);
});
cx.notify();
}
- pub fn highlight_text<T: 'static>(
+ pub fn highlight_text(
&mut self,
+ key: HighlightKey,
ranges: Vec<Range<Anchor>>,
style: HighlightStyle,
cx: &mut Context<Self>,
) {
self.display_map.update(cx, |map, cx| {
- map.highlight_text(
- HighlightKey::Type(TypeId::of::<T>()),
- ranges,
- style,
- false,
- cx,
- )
+ map.highlight_text(key, ranges, style, false, cx)
});
cx.notify();
}
- pub fn text_highlights<'a, T: 'static>(
+ pub fn text_highlights<'a>(
&'a self,
+ key: HighlightKey,
cx: &'a App,
) -> Option<(HighlightStyle, &'a [Range<Anchor>])> {
- self.display_map.read(cx).text_highlights(TypeId::of::<T>())
+ self.display_map.read(cx).text_highlights(key)
}
- pub fn clear_highlights<T: 'static>(&mut self, cx: &mut Context<Self>) {
+ pub fn clear_highlights(&mut self, key: HighlightKey, cx: &mut Context<Self>) {
let cleared = self
.display_map
- .update(cx, |map, _| map.clear_highlights(TypeId::of::<T>()));
+ .update(cx, |map, _| map.clear_highlights(key));
+ if cleared {
+ cx.notify();
+ }
+ }
+
+ pub fn clear_highlights_with(
+ &mut self,
+ f: impl FnMut(&HighlightKey) -> bool,
+ cx: &mut Context<Self>,
+ ) {
+ let cleared = self
+ .display_map
+ .update(cx, |map, _| map.clear_highlights_with(f));
if cleared {
cx.notify();
}
@@ -23805,6 +23825,8 @@ impl Editor {
)
.detach();
}
+ self.semantic_tokens_fetched_for_buffers
+ .remove(&buffer.read(cx).remote_id());
self.update_lsp_data(Some(buffer_id), window, cx);
self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
self.colorize_brackets(false, cx);
@@ -23825,6 +23847,12 @@ impl Editor {
self.refresh_inlay_hints(InlayHintRefreshReason::ExcerptsRemoved(ids.clone()), cx);
for buffer_id in removed_buffer_ids {
self.registered_buffers.remove(buffer_id);
+ self.tasks
+ .retain(|(task_buffer_id, _), _| task_buffer_id != buffer_id);
+ self.semantic_tokens_fetched_for_buffers.remove(buffer_id);
+ self.display_map.update(cx, |display_map, _| {
+ display_map.invalidate_semantic_highlights(*buffer_id);
+ });
}
jsx_tag_auto_close::refresh_enabled_in_any_buffer(self, multibuffer, cx);
cx.emit(EditorEvent::ExcerptsRemoved {
@@ -23846,10 +23874,16 @@ impl Editor {
multi_buffer::Event::ExcerptsExpanded { ids } => {
self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
self.refresh_document_highlights(cx);
+ let snapshot = multibuffer.read(cx).snapshot(cx);
for id in ids {
self.fetched_tree_sitter_chunks.remove(id);
+ if let Some(buffer) = snapshot.buffer_for_excerpt(*id) {
+ self.semantic_tokens_fetched_for_buffers
+ .remove(&buffer.remote_id());
+ }
}
self.colorize_brackets(false, cx);
+ self.update_lsp_data(None, window, cx);
cx.emit(EditorEvent::ExcerptsExpanded { ids: ids.clone() })
}
multi_buffer::Event::Reparsed(buffer_id) => {
@@ -24001,14 +24035,6 @@ impl Editor {
self.update_edit_prediction_settings(cx);
self.refresh_edit_prediction(true, false, window, cx);
self.refresh_inline_values(cx);
- self.refresh_inlay_hints(
- InlayHintRefreshReason::SettingsChange(inlay_hint_settings(
- self.selections.newest_anchor().head(),
- &self.buffer.read(cx).snapshot(cx),
- cx,
- )),
- cx,
- );
let old_cursor_shape = self.cursor_shape;
let old_show_breadcrumbs = self.show_breadcrumbs;
@@ -24029,14 +24055,28 @@ impl Editor {
cx.emit(EditorEvent::BreadcrumbsChanged);
}
- let project_settings = ProjectSettings::get_global(cx);
+ let (
+ restore_unsaved_buffers,
+ show_inline_diagnostics,
+ inline_blame_enabled,
+ new_semantic_token_rules,
+ ) = {
+ let project_settings = ProjectSettings::get_global(cx);
+ (
+ project_settings.session.restore_unsaved_buffers,
+ project_settings.diagnostics.inline.enabled,
+ project_settings.git.inline_blame.enabled,
+ project_settings
+ .global_lsp_settings
+ .semantic_token_rules
+ .clone(),
+ )
+ };
self.buffer_serialization = self
.should_serialize_buffer()
- .then(|| BufferSerialization::new(project_settings.session.restore_unsaved_buffers));
+ .then(|| BufferSerialization::new(restore_unsaved_buffers));
if self.mode.is_full() {
- let show_inline_diagnostics = project_settings.diagnostics.inline.enabled;
- let inline_blame_enabled = project_settings.git.inline_blame.enabled;
if self.show_inline_diagnostics != show_inline_diagnostics {
self.show_inline_diagnostics = show_inline_diagnostics;
self.refresh_inline_diagnostics(false, window, cx);
@@ -24075,6 +24115,24 @@ impl Editor {
}
self.refresh_colors_for_visible_range(None, window, cx);
}
+
+ self.refresh_inlay_hints(
+ InlayHintRefreshReason::SettingsChange(inlay_hint_settings(
+ self.selections.newest_anchor().head(),
+ &self.buffer.read(cx).snapshot(cx),
+ cx,
+ )),
+ cx,
+ );
+
+ if new_semantic_token_rules != self.semantic_token_rules {
+ self.semantic_token_rules = new_semantic_token_rules;
+ self.semantic_tokens_fetched_for_buffers.clear();
+ self.display_map.update(cx, |display_map, _| {
+ display_map.semantic_token_highlights.clear();
+ });
+ self.update_semantic_tokens(None, None, cx);
+ }
}
cx.notify();
@@ -24327,7 +24385,7 @@ impl Editor {
fn marked_text_ranges(&self, cx: &App) -> Option<Vec<Range<MultiBufferOffsetUtf16>>> {
let snapshot = self.buffer.read(cx).read(cx);
- let (_, ranges) = self.text_highlights::<InputComposition>(cx)?;
+ let (_, ranges) = self.text_highlights(HighlightKey::InputComposition, cx)?;
Some(
ranges
.iter()
@@ -24667,7 +24725,7 @@ impl Editor {
}
let existing_pending = self
- .text_highlights::<PendingInput>(cx)
+ .text_highlights(HighlightKey::PendingInput, cx)
.map(|(_, ranges)| ranges.to_vec());
if existing_pending.is_none() && pending.is_empty() {
return;
@@ -24706,9 +24764,10 @@ impl Editor {
.collect();
if pending.is_empty() {
- self.clear_highlights::<PendingInput>(cx);
+ self.clear_highlights(HighlightKey::PendingInput, cx);
} else {
- self.highlight_text::<PendingInput>(
+ self.highlight_text(
+ HighlightKey::PendingInput,
ranges,
HighlightStyle {
underline: Some(UnderlineStyle {
@@ -24729,7 +24788,10 @@ impl Editor {
});
}
- if self.text_highlights::<PendingInput>(cx).is_none() {
+ if self
+ .text_highlights(HighlightKey::PendingInput, cx)
+ .is_none()
+ {
self.ime_transaction.take();
}
}
@@ -25033,10 +25095,11 @@ impl Editor {
self.pull_diagnostics(buffer_id, window, cx);
}
self.refresh_colors_for_visible_range(for_buffer, window, cx);
+ self.update_semantic_tokens(for_buffer, None, cx);
}
fn register_visible_buffers(&mut self, cx: &mut Context<Self>) {
- if self.ignore_lsp_data() {
+ if !self.mode().is_full() {
return;
}
for (_, (visible_buffer, _, _)) in self.visible_excerpts(true, cx) {
@@ -25045,7 +25108,7 @@ impl Editor {
}
fn register_buffer(&mut self, buffer_id: BufferId, cx: &mut Context<Self>) {
- if self.ignore_lsp_data() {
+ if !self.mode().is_full() {
return;
}
@@ -25065,13 +25128,7 @@ impl Editor {
}
}
- fn ignore_lsp_data(&self) -> bool {
- // `ActiveDiagnostic::All` is a special mode where editor's diagnostics are managed by the external view,
- // skip any LSP updates for it.
- self.active_diagnostics == ActiveDiagnostic::All || !self.mode().is_full()
- }
-
- pub(crate) fn create_style(&self, cx: &App) -> EditorStyle {
+ fn create_style(&self, cx: &App) -> EditorStyle {
let settings = ThemeSettings::get_global(cx);
let mut text_style = match self.mode {
@@ -26269,6 +26326,13 @@ pub trait SemanticsProvider {
cx: &mut App,
) -> Option<HashMap<Range<BufferRow>, Task<Result<CacheInlayHints>>>>;
+ fn semantic_tokens(
+ &self,
+ buffer: Entity<Buffer>,
+ refresh: Option<RefreshForServer>,
+ cx: &mut App,
+ ) -> Shared<Task<std::result::Result<BufferSemanticTokens, Arc<anyhow::Error>>>>;
+
fn supports_inlay_hints(&self, buffer: &Entity<Buffer>, cx: &mut App) -> bool;
fn document_highlights(
@@ -26841,6 +26905,17 @@ impl SemanticsProvider for Entity<Project> {
}))
}
+ fn semantic_tokens(
+ &self,
+ buffer: Entity<Buffer>,
+ refresh: Option<RefreshForServer>,
+ cx: &mut App,
+ ) -> Shared<Task<std::result::Result<BufferSemanticTokens, Arc<anyhow::Error>>>> {
+ self.read(cx).lsp_store().update(cx, |lsp_store, cx| {
+ lsp_store.semantic_tokens(buffer, refresh, cx)
+ })
+ }
+
fn range_for_rename(
&self,
buffer: &Entity<Buffer>,
@@ -27499,12 +27574,15 @@ impl EntityInputHandler for Editor {
fn marked_text_range(&self, _: &mut Window, cx: &mut Context<Self>) -> Option<Range<usize>> {
let snapshot = self.buffer.read(cx).read(cx);
- let range = self.text_highlights::<InputComposition>(cx)?.1.first()?;
+ let range = self
+ .text_highlights(HighlightKey::InputComposition, cx)?
+ .1
+ .first()?;
Some(range.start.to_offset_utf16(&snapshot).0.0..range.end.to_offset_utf16(&snapshot).0.0)
}
fn unmark_text(&mut self, _: &mut Window, cx: &mut Context<Self>) {
- self.clear_highlights::<InputComposition>(cx);
+ self.clear_highlights(HighlightKey::InputComposition, cx);
self.ime_transaction.take();
}
@@ -27648,7 +27726,8 @@ impl EntityInputHandler for Editor {
if text.is_empty() {
this.unmark_text(window, cx);
} else {
- this.highlight_text::<InputComposition>(
+ this.highlight_text(
+ HighlightKey::InputComposition,
marked_ranges.clone(),
HighlightStyle {
underline: Some(UnderlineStyle {
@@ -27702,7 +27781,10 @@ impl EntityInputHandler for Editor {
});
}
- if self.text_highlights::<InputComposition>(cx).is_none() {
+ if self
+ .text_highlights(HighlightKey::InputComposition, cx)
+ .is_none()
+ {
self.ime_transaction.take();
}
}
@@ -17614,15 +17614,13 @@ fn test_highlighted_ranges(cx: &mut TestAppContext) {
});
_ = editor.update(cx, |editor, window, cx| {
- struct Type1;
- struct Type2;
-
let buffer = editor.buffer.read(cx).snapshot(cx);
let anchor_range =
|range: Range<Point>| buffer.anchor_after(range.start)..buffer.anchor_after(range.end);
- editor.highlight_background::<Type1>(
+ editor.highlight_background(
+ HighlightKey::ColorizeBracket(0),
&[
anchor_range(Point::new(2, 1)..Point::new(2, 3)),
anchor_range(Point::new(4, 2)..Point::new(4, 4)),
@@ -17632,7 +17630,8 @@ fn test_highlighted_ranges(cx: &mut TestAppContext) {
|_, _| Hsla::red(),
cx,
);
- editor.highlight_background::<Type2>(
+ editor.highlight_background(
+ HighlightKey::ColorizeBracket(1),
&[
anchor_range(Point::new(3, 2)..Point::new(3, 5)),
anchor_range(Point::new(5, 3)..Point::new(5, 6)),
@@ -23927,10 +23926,10 @@ async fn test_folding_buffer_when_multibuffer_has_only_one_excerpt(cx: &mut Test
let selection_range = Point::new(1, 0)..Point::new(2, 0);
multi_buffer_editor.update_in(cx, |editor, window, cx| {
- enum TestHighlight {}
let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
let highlight_range = selection_range.clone().to_anchors(&multi_buffer_snapshot);
- editor.highlight_text::<TestHighlight>(
+ editor.highlight_text(
+ HighlightKey::Editor,
vec![highlight_range.clone()],
HighlightStyle::color(Hsla::green()),
cx,
@@ -24856,7 +24855,8 @@ async fn test_rename_with_duplicate_edits(cx: &mut TestAppContext) {
cx.update_editor(|editor, _, cx| {
let highlight_range = Point::new(0, 7)..Point::new(0, 10);
let highlight_range = highlight_range.to_anchors(&editor.buffer().read(cx).snapshot(cx));
- editor.highlight_background::<DocumentHighlightRead>(
+ editor.highlight_background(
+ HighlightKey::DocumentHighlightRead,
&[highlight_range],
|_, theme| theme.colors().editor_document_highlight_read_background,
cx,
@@ -24934,7 +24934,8 @@ async fn test_rename_without_prepare(cx: &mut TestAppContext) {
cx.update_editor(|editor, _window, cx| {
let highlight_range = Point::new(0, 7)..Point::new(0, 10);
let highlight_range = highlight_range.to_anchors(&editor.buffer().read(cx).snapshot(cx));
- editor.highlight_background::<DocumentHighlightRead>(
+ editor.highlight_background(
+ HighlightKey::DocumentHighlightRead,
&[highlight_range],
|_, theme| theme.colors().editor_document_highlight_read_background,
cx,
@@ -27546,7 +27547,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) {
}
});
- let ensure_result_id = |expected: Option<SharedString>, cx: &mut TestAppContext| {
+ let ensure_result_id = |expected_result_id: Option<SharedString>, cx: &mut TestAppContext| {
project.update(cx, |project, cx| {
let buffer_id = editor
.read(cx)
@@ -27560,7 +27561,7 @@ async fn test_pulling_diagnostics(cx: &mut TestAppContext) {
.lsp_store()
.read(cx)
.result_id_for_buffer_pull(server_id, buffer_id, &None, cx);
- assert_eq!(expected, buffer_result_id);
+ assert_eq!(expected_result_id, buffer_result_id);
});
};
@@ -28361,7 +28362,8 @@ let result = variable * 2;",
.map(|range| range.clone().to_anchors(&buffer_snapshot))
.collect();
- editor.highlight_background::<DocumentHighlightRead>(
+ editor.highlight_background(
+ HighlightKey::DocumentHighlightRead,
&anchor_ranges,
|_, theme| theme.colors().editor_document_highlight_read_background,
cx,
@@ -2,14 +2,14 @@ use crate::{
ActiveDiagnostic, BlockId, CURSORS_VISIBLE_FOR, ChunkRendererContext, ChunkReplacement,
CodeActionSource, ColumnarMode, ConflictsOurs, ConflictsOursMarker, ConflictsOuter,
ConflictsTheirs, ConflictsTheirsMarker, ContextMenuPlacement, CursorShape, CustomBlockId,
- DisplayDiffHunk, DisplayPoint, DisplayRow, DocumentHighlightRead, DocumentHighlightWrite,
- EditDisplayMode, EditPrediction, Editor, EditorMode, EditorSettings, EditorSnapshot,
- EditorStyle, FILE_HEADER_HEIGHT, FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp,
- HandleInput, HoveredCursor, InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp,
- MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown,
- PageUp, PhantomBreakpointIndicator, PhantomDiffReviewIndicator, Point, RowExt, RowRangeExt,
- SelectPhase, SelectedTextHighlight, Selection, SelectionDragState, SelectionEffects,
- SizingBehavior, SoftWrap, StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll,
+ DisplayDiffHunk, DisplayPoint, DisplayRow, EditDisplayMode, EditPrediction, Editor, EditorMode,
+ EditorSettings, EditorSnapshot, EditorStyle, FILE_HEADER_HEIGHT, FocusedBlock,
+ GutterDimensions, HalfPageDown, HalfPageUp, HandleInput, HoveredCursor, InlayHintRefreshReason,
+ JumpData, LineDown, LineHighlight, LineUp, MAX_LINE_LEN, MINIMAP_FONT_SIZE,
+ MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown, PageUp, PhantomBreakpointIndicator,
+ PhantomDiffReviewIndicator, Point, RowExt, RowRangeExt, SelectPhase, Selection,
+ SelectionDragState, SelectionEffects, SizingBehavior, SoftWrap, StickyHeaderExcerpt, ToPoint,
+ ToggleFold, ToggleFoldAll,
code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP},
column_pixels,
display_map::{
@@ -27,7 +27,6 @@ use crate::{
POPOVER_RIGHT_OFFSET, hover_at,
},
inlay_hint_settings,
- items::BufferSearchHighlights,
mouse_context_menu::{self, MenuPosition},
scroll::{
ActiveScrollbarState, Autoscroll, ScrollOffset, ScrollPixelOffset, ScrollbarThumbState,
@@ -498,6 +497,7 @@ impl EditorElement {
register_action(editor, window, Editor::toggle_relative_line_numbers);
register_action(editor, window, Editor::toggle_indent_guides);
register_action(editor, window, Editor::toggle_inlay_hints);
+ register_action(editor, window, Editor::toggle_semantic_highlights);
register_action(editor, window, Editor::toggle_edit_predictions);
if editor.read(cx).diagnostics_enabled() {
register_action(editor, window, Editor::toggle_diagnostics);
@@ -1991,13 +1991,13 @@ impl EditorElement {
(is_singleton && scrollbar_settings.git_diff && snapshot.buffer_snapshot().has_diff_hunks())
||
// Buffer Search Results
- (is_singleton && scrollbar_settings.search_results && editor.has_background_highlights::<BufferSearchHighlights>())
+ (is_singleton && scrollbar_settings.search_results && editor.has_background_highlights(HighlightKey::BufferSearchHighlights))
||
// Selected Text Occurrences
- (is_singleton && scrollbar_settings.selected_text && editor.has_background_highlights::<SelectedTextHighlight>())
+ (is_singleton && scrollbar_settings.selected_text && editor.has_background_highlights(HighlightKey::SelectedTextHighlight))
||
// Selected Symbol Occurrences
- (is_singleton && scrollbar_settings.selected_symbol && (editor.has_background_highlights::<DocumentHighlightRead>() || editor.has_background_highlights::<DocumentHighlightWrite>()))
+ (is_singleton && scrollbar_settings.selected_symbol && (editor.has_background_highlights(HighlightKey::DocumentHighlightRead) || editor.has_background_highlights(HighlightKey::DocumentHighlightWrite)))
||
// Diagnostics
(is_singleton && scrollbar_settings.diagnostics != ScrollbarDiagnostics::None && snapshot.buffer_snapshot().has_diagnostics())
@@ -3785,7 +3785,9 @@ impl EditorElement {
})
.collect()
} else {
- let chunks = snapshot.highlighted_chunks(rows.clone(), true, style);
+ let use_tree_sitter = !snapshot.semantic_tokens_enabled
+ || snapshot.use_tree_sitter_for_syntax(rows.start, cx);
+ let chunks = snapshot.highlighted_chunks(rows.clone(), use_tree_sitter, style);
LineWithInvisibles::from_chunks(
chunks,
style,
@@ -6985,15 +6987,13 @@ impl EditorElement {
background_highlights.iter()
{
let is_search_highlights = *background_highlight_id
- == HighlightKey::Type(TypeId::of::<BufferSearchHighlights>());
- let is_text_highlights = *background_highlight_id
- == HighlightKey::Type(TypeId::of::<SelectedTextHighlight>());
+ == HighlightKey::BufferSearchHighlights;
+ let is_text_highlights =
+ *background_highlight_id == HighlightKey::SelectedTextHighlight;
let is_symbol_occurrences = *background_highlight_id
- == HighlightKey::Type(TypeId::of::<DocumentHighlightRead>())
+ == HighlightKey::DocumentHighlightRead
|| *background_highlight_id
- == HighlightKey::Type(
- TypeId::of::<DocumentHighlightWrite>(),
- );
+ == HighlightKey::DocumentHighlightWrite;
if (is_search_highlights && scrollbar_settings.search_results)
|| (is_text_highlights && scrollbar_settings.selected_text)
|| (is_symbol_occurrences && scrollbar_settings.selected_symbol)
@@ -11709,7 +11709,9 @@ pub fn layout_line(
window: &mut Window,
cx: &mut App,
) -> LineWithInvisibles {
- let chunks = snapshot.highlighted_chunks(row..row + DisplayRow(1), true, style);
+ let use_tree_sitter =
+ !snapshot.semantic_tokens_enabled || snapshot.use_tree_sitter_for_syntax(row, cx);
+ let chunks = snapshot.highlighted_chunks(row..row + DisplayRow(1), use_tree_sitter, style);
LineWithInvisibles::from_chunks(
chunks,
style,
@@ -1,11 +1,9 @@
-use crate::{Editor, RangeToAnchorExt};
+use crate::{Editor, HighlightKey, RangeToAnchorExt};
use gpui::{Context, HighlightStyle, Window};
use language::CursorShape;
use multi_buffer::MultiBufferOffset;
use theme::ActiveTheme;
-enum MatchingBracketHighlight {}
-
impl Editor {
#[ztracing::instrument(skip_all)]
pub fn refresh_matching_bracket_highlights(
@@ -13,7 +11,7 @@ impl Editor {
window: &Window,
cx: &mut Context<Editor>,
) {
- self.clear_highlights::<MatchingBracketHighlight>(cx);
+ self.clear_highlights(HighlightKey::MatchingBracket, cx);
let snapshot = self.snapshot(window, cx);
let buffer_snapshot = snapshot.buffer_snapshot();
@@ -41,7 +39,8 @@ impl Editor {
if let Some((opening_range, closing_range)) =
buffer_snapshot.innermost_enclosing_bracket_ranges(head..tail, None)
{
- self.highlight_text::<MatchingBracketHighlight>(
+ self.highlight_text(
+ HighlightKey::MatchingBracket,
vec![
opening_range.to_anchors(&buffer_snapshot),
closing_range.to_anchors(&buffer_snapshot),
@@ -118,33 +117,42 @@ mod tests {
another_test(1, 2, 3);
}
"#});
- cx.assert_editor_text_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ cx.assert_editor_text_highlights(
+ HighlightKey::MatchingBracket,
+ indoc! {r#"
pub fn testΒ«(Β»"Test argument"Β«)Β» {
another_test(1, 2, 3);
}
- "#});
+ "#},
+ );
cx.set_state(indoc! {r#"
pub fn test("Test argument") {
another_test(1, Λ2, 3);
}
"#});
- cx.assert_editor_text_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ cx.assert_editor_text_highlights(
+ HighlightKey::MatchingBracket,
+ indoc! {r#"
pub fn test("Test argument") {
another_testΒ«(Β»1, 2, 3Β«)Β»;
}
- "#});
+ "#},
+ );
cx.set_state(indoc! {r#"
pub fn test("Test argument") {
anotherΛ_test(1, 2, 3);
}
"#});
- cx.assert_editor_text_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ cx.assert_editor_text_highlights(
+ HighlightKey::MatchingBracket,
+ indoc! {r#"
pub fn test("Test argument") Β«{Β»
another_test(1, 2, 3);
Β«}Β»
- "#});
+ "#},
+ );
// positioning outside of brackets removes highlight
cx.set_state(indoc! {r#"
@@ -152,11 +160,14 @@ mod tests {
another_test(1, 2, 3);
}
"#});
- cx.assert_editor_text_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ cx.assert_editor_text_highlights(
+ HighlightKey::MatchingBracket,
+ indoc! {r#"
pub fn test("Test argument") {
another_test(1, 2, 3);
}
- "#});
+ "#},
+ );
// non empty selection dismisses highlight
cx.set_state(indoc! {r#"
@@ -164,10 +175,13 @@ mod tests {
another_test(1, 2, 3);
}
"#});
- cx.assert_editor_text_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ cx.assert_editor_text_highlights(
+ HighlightKey::MatchingBracket,
+ indoc! {r#"
pub fn testΒ«("Test argument") {
another_test(1, 2, 3);
}
- "#});
+ "#},
+ );
}
}
@@ -1,8 +1,8 @@
use crate::{
Anchor, Editor, EditorSettings, EditorSnapshot, FindAllReferences, GoToDefinition,
GoToDefinitionSplit, GoToTypeDefinition, GoToTypeDefinitionSplit, GotoDefinitionKind,
- Navigated, PointForPosition, SelectPhase, editor_settings::GoToDefinitionFallback,
- scroll::ScrollAmount,
+ HighlightKey, Navigated, PointForPosition, SelectPhase,
+ editor_settings::GoToDefinitionFallback, scroll::ScrollAmount,
};
use gpui::{App, AsyncWindowContext, Context, Entity, Modifiers, Task, Window, px};
use language::{Bias, ToOffset};
@@ -149,7 +149,7 @@ impl Editor {
pub(crate) fn hide_hovered_link(&mut self, cx: &mut Context<Self>) {
self.hovered_link_state.take();
- self.clear_highlights::<HoveredLinkState>(cx);
+ self.clear_highlights(HighlightKey::HoveredLinkState, cx);
}
pub(crate) fn handle_click_hovered_link(
@@ -415,7 +415,7 @@ pub fn show_link_definition(
this.update(cx, |editor, cx| {
// Clear any existing highlights
- editor.clear_highlights::<HoveredLinkState>(cx);
+ editor.clear_highlights(HighlightKey::HoveredLinkState, cx);
let Some(hovered_link_state) = editor.hovered_link_state.as_mut() else {
editor.hide_hovered_link(cx);
return;
@@ -457,10 +457,18 @@ pub fn show_link_definition(
});
match highlight_range {
- RangeInEditor::Text(text_range) => editor
- .highlight_text::<HoveredLinkState>(vec![text_range], style, cx),
- RangeInEditor::Inlay(highlight) => editor
- .highlight_inlays::<HoveredLinkState>(vec![highlight], style, cx),
+ RangeInEditor::Text(text_range) => editor.highlight_text(
+ HighlightKey::HoveredLinkState,
+ vec![text_range],
+ style,
+ cx,
+ ),
+ RangeInEditor::Inlay(highlight) => editor.highlight_inlays(
+ HighlightKey::HoveredLinkState,
+ vec![highlight],
+ style,
+ cx,
+ ),
}
}
} else {
@@ -843,18 +851,24 @@ mod tests {
requests.next().await;
cx.run_until_parked();
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
struct A;
let Β«variableΒ» = A;
- "});
+ "},
+ );
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.run_until_parked();
// Assert no link highlights
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
struct A;
let variable = A;
- "});
+ "},
+ );
cx.simulate_click(screen_coord.unwrap(), modifiers);
@@ -912,17 +926,23 @@ mod tests {
cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key());
requests.next().await;
cx.background_executor.run_until_parked();
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
fn test() { Β«do_workΒ»(); }
fn do_work() { test(); }
- "});
+ "},
+ );
// Unpress cmd causes highlight to go away
cx.simulate_modifiers_change(Modifiers::none());
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
fn test() { do_work(); }
fn do_work() { test(); }
- "});
+ "},
+ );
let mut requests =
cx.set_request_handler::<GotoDefinition, _, _>(move |url, _, _| async move {
@@ -939,10 +959,13 @@ mod tests {
cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key());
requests.next().await;
cx.background_executor.run_until_parked();
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
fn test() { Β«do_workΒ»(); }
fn do_work() { test(); }
- "});
+ "},
+ );
// Moving mouse to location with no response dismisses highlight
let hover_point = cx.pixel_position(indoc! {"
@@ -961,10 +984,13 @@ mod tests {
cx.background_executor.run_until_parked();
// Assert no link highlights
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
fn test() { do_work(); }
fn do_work() { test(); }
- "});
+ "},
+ );
// // Move mouse without cmd and then pressing cmd triggers highlight
let hover_point = cx.pixel_position(indoc! {"
@@ -974,10 +1000,13 @@ mod tests {
cx.simulate_mouse_move(hover_point, None, Modifiers::none());
// Assert no link highlights
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
fn test() { do_work(); }
fn do_work() { test(); }
- "});
+ "},
+ );
let symbol_range = cx.lsp_range(indoc! {"
fn test() { do_work(); }
@@ -1005,23 +1034,32 @@ mod tests {
requests.next().await;
cx.background_executor.run_until_parked();
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
fn test() { do_work(); }
fn do_work() { Β«testΒ»(); }
- "});
+ "},
+ );
cx.deactivate_window();
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
fn test() { do_work(); }
fn do_work() { test(); }
- "});
+ "},
+ );
cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key());
cx.background_executor.run_until_parked();
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
fn test() { do_work(); }
fn do_work() { Β«testΒ»(); }
- "});
+ "},
+ );
// Moving again within the same symbol range doesn't re-request
let hover_point = cx.pixel_position(indoc! {"
@@ -1030,10 +1068,13 @@ mod tests {
"});
cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key());
cx.background_executor.run_until_parked();
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
fn test() { do_work(); }
fn do_work() { Β«testΒ»(); }
- "});
+ "},
+ );
// Cmd click with existing definition doesn't re-request and dismisses highlight
cx.simulate_click(hover_point, Modifiers::secondary_key());
@@ -1050,10 +1091,13 @@ mod tests {
"});
// Assert no link highlights after jump
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
fn test() { do_work(); }
fn do_work() { test(); }
- "});
+ "},
+ );
// Cmd click without existing definition requests and jumps
let hover_point = cx.pixel_position(indoc! {"
@@ -1123,10 +1167,13 @@ mod tests {
cx.simulate_mouse_move(hover_point, None, Modifiers::secondary_key());
cx.background_executor.run_until_parked();
assert!(requests.try_next().is_err());
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
fn test() { do_work(); }
fn do_work() { test(); }
- "});
+ "},
+ );
cx.background_executor.run_until_parked();
}
@@ -1242,7 +1289,7 @@ mod tests {
cx.update_editor(|editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
let actual_highlights = snapshot
- .inlay_highlights::<HoveredLinkState>()
+ .inlay_highlights(HighlightKey::HoveredLinkState)
.into_iter()
.flat_map(|highlights| highlights.values().map(|(_, highlight)| highlight))
.collect::<Vec<_>>();
@@ -1261,7 +1308,7 @@ mod tests {
cx.update_editor(|editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
let actual_ranges = snapshot
- .text_highlight_ranges::<HoveredLinkState>()
+ .text_highlight_ranges(HighlightKey::HoveredLinkState)
.map(|ranges| ranges.as_ref().clone().1)
.unwrap_or_default();
@@ -1301,9 +1348,12 @@ mod tests {
"});
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
Let's test a [complex](Β«https://zed.dev/channel/had-(oops)ΛΒ») case.
- "});
+ "},
+ );
cx.simulate_click(screen_coord, Modifiers::secondary_key());
assert_eq!(
@@ -1329,7 +1379,8 @@ mod tests {
cx.pixel_position(indoc! {"https://zed.dev/relΛeases is a cool webpage."});
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
- cx.assert_editor_text_highlights::<HoveredLinkState>(
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
indoc! {"Β«https://zed.dev/releasesΛΒ» is a cool webpage."},
);
@@ -1354,7 +1405,8 @@ mod tests {
cx.pixel_position(indoc! {"A cool webpage is https://zed.dev/releΛases"});
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
- cx.assert_editor_text_highlights::<HoveredLinkState>(
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
indoc! {"A cool webpage is Β«https://zed.dev/releasesΛΒ»"},
);
@@ -1548,7 +1600,7 @@ mod tests {
assert!(
editor
.snapshot(window, cx)
- .text_highlight_ranges::<HoveredLinkState>()
+ .text_highlight_ranges(HighlightKey::HoveredLinkState)
.unwrap_or_default()
.1
.is_empty()
@@ -1575,21 +1627,27 @@ mod tests {
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
#[cfg(not(target_os = "windows"))]
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
You can't go to a file that does_not_exist.txt.
Go to Β«file2.rsΛΒ» if you want.
Or go to ../dir/file2.rs if you want.
Or go to /root/dir/file2.rs if project is local.
Or go to /root/dir/file2 if this is a Rust file.
- "});
+ "},
+ );
#[cfg(target_os = "windows")]
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
You can't go to a file that does_not_exist.txt.
Go to Β«file2.rsΛΒ» if you want.
Or go to ../dir/file2.rs if you want.
Or go to C:/root/dir/file2.rs if project is local.
Or go to C:/root/dir/file2 if this is a Rust file.
- "});
+ "},
+ );
// Moving the mouse over a relative path that does exist should highlight it
#[cfg(not(target_os = "windows"))]
@@ -1611,21 +1669,27 @@ mod tests {
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
#[cfg(not(target_os = "windows"))]
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
Or go to Β«../dir/file2.rsΛΒ» if you want.
Or go to /root/dir/file2.rs if project is local.
Or go to /root/dir/file2 if this is a Rust file.
- "});
+ "},
+ );
#[cfg(target_os = "windows")]
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
Or go to Β«../dir/file2.rsΛΒ» if you want.
Or go to C:/root/dir/file2.rs if project is local.
Or go to C:/root/dir/file2 if this is a Rust file.
- "});
+ "},
+ );
// Moving the mouse over an absolute path that does exist should highlight it
#[cfg(not(target_os = "windows"))]
@@ -1648,21 +1712,27 @@ mod tests {
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
#[cfg(not(target_os = "windows"))]
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
Or go to ../dir/file2.rs if you want.
Or go to «/root/dir/file2.rsˇ» if project is local.
Or go to /root/dir/file2 if this is a Rust file.
- "});
+ "},
+ );
#[cfg(target_os = "windows")]
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
Or go to ../dir/file2.rs if you want.
Or go to «C:/root/dir/file2.rsˇ» if project is local.
Or go to C:/root/dir/file2 if this is a Rust file.
- "});
+ "},
+ );
// Moving the mouse over a path that exists, if we add the language-specific suffix, it should highlight it
#[cfg(not(target_os = "windows"))]
@@ -1684,21 +1754,27 @@ mod tests {
cx.simulate_mouse_move(screen_coord, None, Modifiers::secondary_key());
#[cfg(not(target_os = "windows"))]
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
Or go to ../dir/file2.rs if you want.
Or go to /root/dir/file2.rs if project is local.
Or go to «/root/dir/file2ˇ» if this is a Rust file.
- "});
+ "},
+ );
#[cfg(target_os = "windows")]
- cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
+ cx.assert_editor_text_highlights(
+ HighlightKey::HoveredLinkState,
+ indoc! {"
You can't go to a file that does_not_exist.txt.
Go to file2.rs if you want.
Or go to ../dir/file2.rs if you want.
Or go to C:/root/dir/file2.rs if project is local.
Or go to «C:/root/dir/file2ˇ» if this is a Rust file.
- "});
+ "},
+ );
cx.simulate_click(screen_coord, Modifiers::secondary_key());
@@ -1755,7 +1831,7 @@ mod tests {
assert!(
editor
.snapshot(window, cx)
- .text_highlight_ranges::<HoveredLinkState>()
+ .text_highlight_ranges(HighlightKey::HoveredLinkState)
.unwrap_or_default()
.1
.is_empty()
@@ -1793,7 +1869,7 @@ mod tests {
assert!(
editor
.snapshot(window, cx)
- .text_highlight_ranges::<HoveredLinkState>()
+ .text_highlight_ranges(HighlightKey::HoveredLinkState)
.unwrap_or_default()
.1
.is_empty()
@@ -1,6 +1,6 @@
use crate::{
ActiveDiagnostic, Anchor, AnchorRangeExt, DisplayPoint, DisplayRow, Editor, EditorSettings,
- EditorSnapshot, GlobalDiagnosticRenderer, Hover,
+ EditorSnapshot, GlobalDiagnosticRenderer, HighlightKey, Hover,
display_map::{InlayOffset, ToDisplayPoint, is_invisible},
hover_links::{InlayHighlight, RangeInEditor},
movement::TextLayoutDetails,
@@ -217,7 +217,7 @@ pub fn hide_hover(editor: &mut Editor, cx: &mut Context<Editor>) -> bool {
editor.hover_state.info_task = None;
editor.hover_state.triggered_from = None;
- editor.clear_background_highlights::<HoverState>(cx);
+ editor.clear_background_highlights(HighlightKey::HoverState, cx);
if did_hide {
cx.notify();
@@ -513,10 +513,11 @@ fn show_hover(
this.update_in(cx, |editor, window, cx| {
if hover_highlights.is_empty() {
- editor.clear_background_highlights::<HoverState>(cx);
+ editor.clear_background_highlights(HighlightKey::HoverState, cx);
} else {
// Highlight the selected symbol using a background highlight
- editor.highlight_background::<HoverState>(
+ editor.highlight_background(
+ HighlightKey::HoverState,
&hover_highlights,
|_, theme| theme.colors().element_hover, // todo update theme
cx,
@@ -17,14 +17,14 @@
/// Logic, related to managing LSP inlay hint inlays.
pub mod inlay_hints;
-use std::{any::TypeId, sync::OnceLock};
+use std::sync::OnceLock;
use gpui::{Context, HighlightStyle, Hsla, Rgba, Task};
use multi_buffer::Anchor;
use project::{InlayHint, InlayId};
use text::Rope;
-use crate::{Editor, hover_links::InlayHighlight};
+use crate::{Editor, HighlightKey, hover_links::InlayHighlight};
/// A splice to send into the `inlay_map` for updating the visible inlays on the screen.
/// "Visible" inlays may not be displayed in the buffer right away, but those are ready to be displayed on further buffer scroll, pane item activations, etc. right away without additional LSP queries or settings changes.
@@ -164,15 +164,15 @@ impl Editor {
cx.notify();
}
- pub(crate) fn highlight_inlays<T: 'static>(
+ pub(crate) fn highlight_inlays(
&mut self,
+ key: HighlightKey,
highlights: Vec<InlayHighlight>,
style: HighlightStyle,
cx: &mut Context<Self>,
) {
- self.display_map.update(cx, |map, _| {
- map.highlight_inlays(TypeId::of::<T>(), highlights, style)
- });
+ self.display_map
+ .update(cx, |map, _| map.highlight_inlays(key, highlights, style));
cx.notify();
}
@@ -210,6 +210,7 @@ pub enum InlayHintRefreshReason {
SettingsChange(InlayHintSettings),
NewLinesShown,
BufferEdited(BufferId),
+ ServerRemoved,
RefreshRequested {
server_id: LanguageServerId,
request_id: Option<usize>,
@@ -267,7 +268,7 @@ impl Editor {
reason: InlayHintRefreshReason,
cx: &mut Context<Self>,
) {
- if self.ignore_lsp_data() || self.inlay_hints.is_none() {
+ if !self.mode().is_full() || self.inlay_hints.is_none() {
return;
}
let Some(semantics_provider) = self.semantics_provider() else {
@@ -297,7 +298,8 @@ impl Editor {
let ignore_previous_fetches = match reason {
InlayHintRefreshReason::ModifiersChanged(_)
| InlayHintRefreshReason::Toggle(_)
- | InlayHintRefreshReason::SettingsChange(_) => true,
+ | InlayHintRefreshReason::SettingsChange(_)
+ | InlayHintRefreshReason::ServerRemoved => true,
InlayHintRefreshReason::NewLinesShown
| InlayHintRefreshReason::RefreshRequested { .. }
| InlayHintRefreshReason::ExcerptsRemoved(_) => false,
@@ -505,6 +507,7 @@ impl Editor {
self.splice_inlays(&to_remove, Vec::new(), cx);
return None;
}
+ InlayHintRefreshReason::ServerRemoved => InvalidationStrategy::BufferEdited,
InlayHintRefreshReason::NewLinesShown => InvalidationStrategy::None,
InlayHintRefreshReason::BufferEdited(_) => InvalidationStrategy::BufferEdited,
InlayHintRefreshReason::RefreshRequested {
@@ -1,7 +1,7 @@
use crate::{
Anchor, Autoscroll, BufferSerialization, Capability, Editor, EditorEvent, EditorSettings,
ExcerptId, ExcerptRange, FormatTarget, MultiBuffer, MultiBufferSnapshot, NavigationData,
- ReportEditorEvent, SearchWithinRange, SelectionEffects, ToPoint as _,
+ ReportEditorEvent, SelectionEffects, ToPoint as _,
display_map::HighlightKey,
editor_settings::SeedQuerySetting,
persistence::{DB, SerializedEditor},
@@ -1494,13 +1494,12 @@ impl Editor {
}
}
-pub(crate) enum BufferSearchHighlights {}
impl SearchableItem for Editor {
type Match = Range<Anchor>;
fn get_matches(&self, _window: &mut Window, _: &mut App) -> Vec<Range<Anchor>> {
self.background_highlights
- .get(&HighlightKey::Type(TypeId::of::<BufferSearchHighlights>()))
+ .get(&HighlightKey::BufferSearchHighlights)
.map_or(Vec::new(), |(_color, ranges)| {
ranges.iter().cloned().collect()
})
@@ -1508,7 +1507,7 @@ impl SearchableItem for Editor {
fn clear_matches(&mut self, _: &mut Window, cx: &mut Context<Self>) {
if self
- .clear_background_highlights::<BufferSearchHighlights>(cx)
+ .clear_background_highlights(HighlightKey::BufferSearchHighlights, cx)
.is_some()
{
cx.emit(SearchEvent::MatchesInvalidated);
@@ -1524,10 +1523,11 @@ impl SearchableItem for Editor {
) {
let existing_range = self
.background_highlights
- .get(&HighlightKey::Type(TypeId::of::<BufferSearchHighlights>()))
+ .get(&HighlightKey::BufferSearchHighlights)
.map(|(_, range)| range.as_ref());
let updated = existing_range != Some(matches);
- self.highlight_background::<BufferSearchHighlights>(
+ self.highlight_background(
+ HighlightKey::BufferSearchHighlights,
matches,
move |index, theme| {
if active_match_index == Some(*index) {
@@ -1544,7 +1544,7 @@ impl SearchableItem for Editor {
}
fn has_filtered_search_ranges(&mut self) -> bool {
- self.has_background_highlights::<SearchWithinRange>()
+ self.has_background_highlights(HighlightKey::SearchWithinRange)
}
fn toggle_filtered_search_ranges(
@@ -1555,7 +1555,7 @@ impl SearchableItem for Editor {
) {
if self.has_filtered_search_ranges() {
self.previous_search_ranges = self
- .clear_background_highlights::<SearchWithinRange>(cx)
+ .clear_background_highlights(HighlightKey::SearchWithinRange, cx)
.map(|(_, ranges)| ranges)
}
@@ -1779,7 +1779,7 @@ impl SearchableItem for Editor {
let buffer = self.buffer().read(cx).snapshot(cx);
let search_within_ranges = self
.background_highlights
- .get(&HighlightKey::Type(TypeId::of::<SearchWithinRange>()))
+ .get(&HighlightKey::SearchWithinRange)
.map_or(vec![], |(_color, ranges)| {
ranges.iter().cloned().collect::<Vec<_>>()
});
@@ -49,7 +49,7 @@ pub(super) fn refresh_linked_ranges(
window: &mut Window,
cx: &mut Context<Editor>,
) -> Option<()> {
- if editor.ignore_lsp_data() || editor.pending_rename.is_some() {
+ if !editor.mode().is_full() || editor.pending_rename.is_some() {
return None;
}
let project = editor.project()?.downgrade();
@@ -2,7 +2,7 @@ use std::{cmp, ops::Range};
use collections::HashMap;
use futures::future::join_all;
-use gpui::{Hsla, Rgba, Task};
+use gpui::{Hsla, Rgba};
use itertools::Itertools;
use language::point_from_lsp;
use multi_buffer::Anchor;
@@ -149,7 +149,7 @@ impl Editor {
_: &Window,
cx: &mut Context<Self>,
) {
- if self.ignore_lsp_data() {
+ if !self.mode().is_full() {
return;
}
let Some(project) = self.project.clone() else {
@@ -163,10 +163,11 @@ impl Editor {
return;
}
- let visible_buffers = self
+ let buffers_to_query = self
.visible_excerpts(true, cx)
.into_values()
.map(|(buffer, ..)| buffer)
+ .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
.filter(|editor_buffer| {
let editor_buffer_id = editor_buffer.read(cx).remote_id();
buffer_id.is_none_or(|buffer_id| buffer_id == editor_buffer_id)
@@ -175,30 +176,39 @@ impl Editor {
.unique_by(|buffer| buffer.read(cx).remote_id())
.collect::<Vec<_>>();
- let all_colors_task = project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
- visible_buffers
- .into_iter()
- .filter_map(|buffer| {
- let buffer_id = buffer.read(cx).remote_id();
- let known_cache_version = self.colors.as_ref().and_then(|colors| {
- Some(colors.buffer_colors.get(&buffer_id)?.cache_version_used)
- });
- let colors_task = lsp_store.document_colors(known_cache_version, buffer, cx)?;
- Some(async move { (buffer_id, colors_task.await) })
- })
- .collect::<Vec<_>>()
- });
-
- if all_colors_task.is_empty() {
- self.refresh_colors_task = Task::ready(());
- return;
- }
-
self.refresh_colors_task = cx.spawn(async move |editor, cx| {
cx.background_executor()
.timer(FETCH_COLORS_DEBOUNCE_TIMEOUT)
.await;
+ let Some(all_colors_task) = editor
+ .update(cx, |editor, cx| {
+ project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
+ buffers_to_query
+ .into_iter()
+ .filter_map(|buffer| {
+ let buffer_id = buffer.read(cx).remote_id();
+ let known_cache_version =
+ editor.colors.as_ref().and_then(|colors| {
+ Some(
+ colors
+ .buffer_colors
+ .get(&buffer_id)?
+ .cache_version_used,
+ )
+ });
+ let colors_task =
+ lsp_store.document_colors(known_cache_version, buffer, cx)?;
+ Some(async move { (buffer_id, colors_task.await) })
+ })
+ .collect::<Vec<_>>()
+ })
+ })
+ .ok()
+ else {
+ return;
+ };
+
let all_colors = join_all(all_colors_task).await;
if all_colors.is_empty() {
return;
@@ -0,0 +1,1497 @@
+use std::{collections::hash_map, sync::Arc, time::Duration};
+
+use collections::HashSet;
+use futures::future::join_all;
+use gpui::{
+ Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
+};
+use itertools::Itertools as _;
+use language::language_settings::language_settings;
+use project::lsp_store::{
+ BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer, TokenType,
+};
+use settings::{SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight};
+use text::BufferId;
+use theme::SyntaxTheme;
+use ui::ActiveTheme as _;
+
+use crate::{
+ Editor,
+ actions::ToggleSemanticHighlights,
+ display_map::{HighlightStyleInterner, SemanticTokenHighlight},
+};
+
+impl Editor {
+ pub fn semantic_highlights_enabled(&self) -> bool {
+ self.semantic_tokens_enabled
+ }
+
+ pub fn toggle_semantic_highlights(
+ &mut self,
+ _: &ToggleSemanticHighlights,
+ _window: &mut gpui::Window,
+ cx: &mut Context<Self>,
+ ) {
+ self.semantic_tokens_enabled = !self.semantic_tokens_enabled;
+ self.update_semantic_tokens(None, None, cx);
+ }
+
+ pub(crate) fn update_semantic_tokens(
+ &mut self,
+ buffer_id: Option<BufferId>,
+ for_server: Option<RefreshForServer>,
+ cx: &mut Context<Self>,
+ ) {
+ if !self.mode().is_full() || !self.semantic_tokens_enabled {
+ self.semantic_tokens_fetched_for_buffers.clear();
+ self.display_map.update(cx, |display_map, _| {
+ display_map.semantic_token_highlights.clear();
+ });
+ self.update_semantic_tokens_task = Task::ready(());
+ cx.notify();
+ return;
+ }
+
+ let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
+ if for_server.is_some() {
+ invalidate_semantic_highlights_for_buffers.extend(
+ self.semantic_tokens_fetched_for_buffers
+ .drain()
+ .map(|(buffer_id, _)| buffer_id),
+ );
+ }
+
+ let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
+ else {
+ return;
+ };
+
+ let buffers_to_query = self
+ .visible_excerpts(true, cx)
+ .into_values()
+ .map(|(buffer, ..)| buffer)
+ .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
+ .filter_map(|editor_buffer| {
+ let editor_buffer_id = editor_buffer.read(cx).remote_id();
+ if self.registered_buffers.contains_key(&editor_buffer_id)
+ && language_settings(
+ editor_buffer.read(cx).language().map(|l| l.name()),
+ editor_buffer.read(cx).file(),
+ cx,
+ )
+ .semantic_tokens
+ .enabled()
+ {
+ Some((editor_buffer_id, editor_buffer))
+ } else {
+ None
+ }
+ })
+ .unique_by(|(buffer_id, _)| *buffer_id)
+ .collect::<Vec<_>>();
+
+ self.update_semantic_tokens_task = cx.spawn(async move |editor, cx| {
+ cx.background_executor()
+ .timer(Duration::from_millis(50))
+ .await;
+ let Some(all_semantic_tokens_task) = editor
+ .update(cx, |editor, cx| {
+ buffers_to_query
+ .into_iter()
+ .filter_map(|(buffer_id, buffer)| {
+ let known_version =
+ editor.semantic_tokens_fetched_for_buffers.get(&buffer_id);
+ let query_version = buffer.read(cx).version();
+ if known_version.is_some_and(|known_version| {
+ !query_version.changed_since(known_version)
+ }) {
+ None
+ } else {
+ let task = sema.semantic_tokens(buffer, for_server, cx);
+ Some(async move { (buffer_id, query_version, task.await) })
+ }
+ })
+ .collect::<Vec<_>>()
+ })
+ .ok()
+ else {
+ return;
+ };
+
+ let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
+ editor.update(cx, |editor, cx| {
+ editor.display_map.update(cx, |display_map, _| {
+ for buffer_id in invalidate_semantic_highlights_for_buffers {
+ display_map.invalidate_semantic_highlights(buffer_id);
+ }
+ });
+
+
+ if all_semantic_tokens.is_empty() {
+ return;
+ }
+ let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
+ let all_excerpts = editor.buffer().read(cx).excerpt_ids();
+
+ for (buffer_id, query_version, tokens) in all_semantic_tokens {
+ let tokens = match tokens {
+ Ok(BufferSemanticTokens { tokens: Some(tokens) }) => {
+ tokens
+ },
+ Ok(BufferSemanticTokens { tokens: None }) => {
+ continue;
+ },
+ Err(e) => {
+ log::error!("Failed to fetch semantic tokens for buffer {buffer_id:?}: {e:#}");
+ continue;
+ },
+ };
+
+ match editor.semantic_tokens_fetched_for_buffers.entry(buffer_id) {
+ hash_map::Entry::Occupied(mut o) => {
+ if query_version.changed_since(o.get()) {
+ o.insert(query_version);
+ } else {
+ continue;
+ }
+ },
+ hash_map::Entry::Vacant(v) => {
+ v.insert(query_version);
+ },
+ }
+
+ let language_name = editor
+ .buffer()
+ .read(cx)
+ .buffer(buffer_id)
+ .and_then(|buf| buf.read(cx).language().map(|l| l.name()));
+
+ editor.display_map.update(cx, |display_map, cx| {
+ project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
+ let mut token_highlights = Vec::new();
+ let mut interner = HighlightStyleInterner::default();
+ for (server_id, server_tokens) in tokens {
+ let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
+ server_id,
+ language_name.as_ref(),
+ cx,
+ )
+ else {
+ continue;
+ };
+ token_highlights.extend(buffer_into_editor_highlights(
+ &server_tokens,
+ stylizer,
+ &all_excerpts,
+ &multi_buffer_snapshot,
+ &mut interner,
+ cx,
+ ));
+ }
+
+ token_highlights.sort_by(|a, b| {
+ a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
+ });
+ display_map
+ .semantic_token_highlights
+ .insert(buffer_id, (Arc::from(token_highlights), Arc::new(interner)));
+ });
+ });
+ }
+
+ cx.notify();
+ }).ok();
+ });
+ }
+}
+
+fn buffer_into_editor_highlights<'a, 'b>(
+ buffer_tokens: &'a [BufferSemanticToken],
+ stylizer: &'a SemanticTokenStylizer,
+ all_excerpts: &'a [multi_buffer::ExcerptId],
+ multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
+ interner: &'b mut HighlightStyleInterner,
+ cx: &'a gpui::App,
+) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
+ buffer_tokens.iter().filter_map(|token| {
+ let multi_buffer_start = all_excerpts
+ .iter()
+ .find_map(|&excerpt_id| {
+ multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, token.range.start)
+ })
+ .and_then(|anchor| anchor.try_into().ok())?;
+ let multi_buffer_end = all_excerpts
+ .iter()
+ .find_map(|&excerpt_id| {
+ multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, token.range.end)
+ })
+ .and_then(|anchor| anchor.try_into().ok())?;
+
+ let style = convert_token(
+ stylizer,
+ cx.theme().syntax(),
+ token.token_type,
+ token.token_modifiers,
+ )?;
+ let style = interner.intern(style);
+ Some(SemanticTokenHighlight {
+ range: multi_buffer_start..multi_buffer_end,
+ style,
+ token_type: token.token_type,
+ token_modifiers: token.token_modifiers,
+ server_id: stylizer.server_id(),
+ })
+ })
+}
+
+fn convert_token(
+ stylizer: &SemanticTokenStylizer,
+ theme: &SyntaxTheme,
+ token_type: TokenType,
+ modifiers: u32,
+) -> Option<HighlightStyle> {
+ let rules = stylizer.rules_for_token(token_type)?;
+ let matching = rules.iter().filter(|rule| {
+ rule.token_modifiers
+ .iter()
+ .all(|m| stylizer.has_modifier(modifiers, m))
+ });
+
+ let mut highlight = HighlightStyle::default();
+ let mut empty = true;
+
+ for rule in matching {
+ empty = false;
+
+ let style = rule.style.iter().find_map(|style| theme.get_opt(style));
+
+ macro_rules! overwrite {
+ (
+ highlight.$highlight_field:ident,
+ SemanticTokenRule::$rule_field:ident,
+ $transform:expr $(,)?
+ ) => {
+ highlight.$highlight_field = rule
+ .$rule_field
+ .map($transform)
+ .or_else(|| style.and_then(|s| s.$highlight_field))
+ .or(highlight.$highlight_field)
+ };
+ }
+
+ overwrite!(
+ highlight.color,
+ SemanticTokenRule::foreground_color,
+ Into::into,
+ );
+
+ overwrite!(
+ highlight.background_color,
+ SemanticTokenRule::background_color,
+ Into::into,
+ );
+
+ overwrite!(
+ highlight.font_weight,
+ SemanticTokenRule::font_weight,
+ |w| match w {
+ SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
+ SemanticTokenFontWeight::Bold => FontWeight::BOLD,
+ },
+ );
+
+ overwrite!(
+ highlight.font_style,
+ SemanticTokenRule::font_style,
+ |s| match s {
+ SemanticTokenFontStyle::Normal => FontStyle::Normal,
+ SemanticTokenFontStyle::Italic => FontStyle::Italic,
+ },
+ );
+
+ overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
+ UnderlineStyle {
+ thickness: 1.0.into(),
+ color: match u {
+ SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
+ SemanticTokenColorOverride::InheritForeground(false) => None,
+ SemanticTokenColorOverride::Replace(c) => Some(c.into()),
+ },
+ ..Default::default()
+ }
+ });
+
+ overwrite!(
+ highlight.strikethrough,
+ SemanticTokenRule::strikethrough,
+ |s| StrikethroughStyle {
+ thickness: 1.0.into(),
+ color: match s {
+ SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
+ SemanticTokenColorOverride::InheritForeground(false) => None,
+ SemanticTokenColorOverride::Replace(c) => Some(c.into()),
+ },
+ },
+ );
+ }
+
+ if empty { None } else { Some(highlight) }
+}
+
+#[cfg(test)]
+mod tests {
+ use std::{
+ ops::{Deref as _, Range},
+ sync::atomic::{self, AtomicUsize},
+ };
+
+ use futures::StreamExt as _;
+ use gpui::{
+ AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, VisualTestContext,
+ };
+ use language::{Language, LanguageConfig, LanguageMatcher};
+ use languages::FakeLspAdapter;
+ use multi_buffer::{
+ DiffbaselessAnchorRangeExt, ExcerptRange, ExpandExcerptDirection, MultiBuffer,
+ MultiBufferOffset,
+ };
+ use project::Project;
+ use rope::Point;
+ use serde_json::json;
+ use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore};
+ use workspace::{Workspace, WorkspaceHandle as _};
+
+ use crate::{
+ Capability,
+ editor_tests::{init_test, update_test_language_settings},
+ test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
+ };
+
+ use super::*;
+
+ #[gpui::test]
+ async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ update_test_language_settings(cx, |language_settings| {
+ language_settings.languages.0.insert(
+ "Rust".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..Default::default()
+ },
+ );
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: lsp::SemanticTokensLegend {
+ token_types: vec!["function".into()],
+ token_modifiers: vec![],
+ },
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..Default::default()
+ },
+ ),
+ ),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ let full_counter = Arc::new(AtomicUsize::new(0));
+ let full_counter_clone = full_counter.clone();
+
+ let mut full_request = cx
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _, _| {
+ full_counter_clone.fetch_add(1, atomic::Ordering::Release);
+ async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ data: vec![
+ 0, // delta_line
+ 3, // delta_start
+ 4, // length
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ ],
+ // The server isn't capable of deltas, so even though we sent back
+ // a result ID, the client shouldn't request a delta.
+ result_id: Some("a".into()),
+ },
+ )))
+ }
+ },
+ );
+
+        cx.set_state("ˇfn main() {}");
+ assert!(full_request.next().await.is_some());
+
+ cx.run_until_parked();
+
+        cx.set_state("ˇfn main() { a }");
+ assert!(full_request.next().await.is_some());
+
+ cx.run_until_parked();
+
+ assert_eq!(
+ extract_semantic_highlights(&cx.editor, &cx),
+ vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
+ );
+
+ assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
+ }
+
+ #[gpui::test]
+ async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ update_test_language_settings(cx, |language_settings| {
+ language_settings.languages.0.insert(
+ "Rust".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..Default::default()
+ },
+ );
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: lsp::SemanticTokensLegend {
+ token_types: vec!["function".into()],
+ token_modifiers: vec![],
+ },
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
+ ..Default::default()
+ },
+ ),
+ ),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ let full_counter = Arc::new(AtomicUsize::new(0));
+ let full_counter_clone = full_counter.clone();
+
+ let mut full_request = cx
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _, _| {
+ full_counter_clone.fetch_add(1, atomic::Ordering::Release);
+ async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ data: vec![
+ 0, // delta_line
+ 3, // delta_start
+ 4, // length
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ ],
+ result_id: None, // Sending back `None` forces the client to not use deltas.
+ },
+ )))
+ }
+ },
+ );
+
+        cx.set_state("ˇfn main() {}");
+ assert!(full_request.next().await.is_some());
+
+ let task = cx.update_editor(|e, _, _| {
+ std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
+ });
+ task.await;
+
+        cx.set_state("ˇfn main() { a }");
+ assert!(full_request.next().await.is_some());
+
+ let task = cx.update_editor(|e, _, _| {
+ std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
+ });
+ task.await;
+ assert_eq!(
+ extract_semantic_highlights(&cx.editor, &cx),
+ vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
+ );
+ assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
+ }
+
+ #[gpui::test]
+ async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ update_test_language_settings(cx, |language_settings| {
+ language_settings.languages.0.insert(
+ "Rust".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..Default::default()
+ },
+ );
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: lsp::SemanticTokensLegend {
+ token_types: vec!["function".into()],
+ token_modifiers: vec![],
+ },
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
+ ..Default::default()
+ },
+ ),
+ ),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ let full_counter = Arc::new(AtomicUsize::new(0));
+ let full_counter_clone = full_counter.clone();
+ let delta_counter = Arc::new(AtomicUsize::new(0));
+ let delta_counter_clone = delta_counter.clone();
+
+ let mut full_request = cx
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _, _| {
+ full_counter_clone.fetch_add(1, atomic::Ordering::Release);
+ async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ data: vec![
+ 0, // delta_line
+ 3, // delta_start
+ 4, // length
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ ],
+ result_id: Some("a".into()),
+ },
+ )))
+ }
+ },
+ );
+
+ let mut delta_request = cx
+ .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
+ move |_, params, _| {
+ delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
+ assert_eq!(params.previous_result_id, "a");
+ async move {
+ Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
+ lsp::SemanticTokensDelta {
+ edits: vec![],
+ result_id: Some("b".into()),
+ },
+ )))
+ }
+ },
+ );
+
+ // Initial request, for the empty buffer.
+        cx.set_state("ˇfn main() {}");
+ assert!(full_request.next().await.is_some());
+ let task = cx.update_editor(|e, _, _| {
+ std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
+ });
+ task.await;
+
+        cx.set_state("ˇfn main() { a }");
+ assert!(delta_request.next().await.is_some());
+ let task = cx.update_editor(|e, _, _| {
+ std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
+ });
+ task.await;
+
+ assert_eq!(
+ extract_semantic_highlights(&cx.editor, &cx),
+ vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
+ );
+
+ assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
+ assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
+ }
+
+ #[gpui::test]
+ async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ update_test_language_settings(cx, |language_settings| {
+ language_settings.languages.0.insert(
+ "TOML".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..Default::default()
+ },
+ );
+ });
+
+ let toml_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "TOML".into(),
+ matcher: LanguageMatcher {
+ path_suffixes: vec!["toml".into()],
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ None,
+ ));
+
+ // We have 2 language servers for TOML in this test.
+ let toml_legend_1 = lsp::SemanticTokensLegend {
+ token_types: vec!["property".into()],
+ token_modifiers: vec![],
+ };
+ let toml_legend_2 = lsp::SemanticTokensLegend {
+ token_types: vec!["number".into()],
+ token_modifiers: vec![],
+ };
+
+ let app_state = cx.update(workspace::AppState::test);
+
+ cx.update(|cx| {
+ assets::Assets.load_test_fonts(cx);
+ crate::init(cx);
+ workspace::init(app_state.clone(), cx);
+ });
+
+ let project = Project::test(app_state.fs.clone(), [], cx).await;
+ let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+
+ let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
+ let full_counter_toml_1_clone = full_counter_toml_1.clone();
+ let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
+ let full_counter_toml_2_clone = full_counter_toml_2.clone();
+
+ let mut toml_server_1 = language_registry.register_fake_lsp(
+ toml_language.name(),
+ FakeLspAdapter {
+ name: "toml1",
+ capabilities: lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: toml_legend_1,
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..lsp::SemanticTokensOptions::default()
+ },
+ ),
+ ),
+ ..lsp::ServerCapabilities::default()
+ },
+ initializer: Some(Box::new({
+ let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
+ move |fake_server| {
+ let full_counter = full_counter_toml_1_clone.clone();
+ fake_server
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _| {
+ full_counter.fetch_add(1, atomic::Ordering::Release);
+ async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ // highlight 'a' as a property
+ data: vec![
+ 0, // delta_line
+ 0, // delta_start
+ 1, // length
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ ],
+ result_id: Some("a".into()),
+ },
+ )))
+ }
+ },
+ );
+ }
+ })),
+ ..FakeLspAdapter::default()
+ },
+ );
+ let mut toml_server_2 = language_registry.register_fake_lsp(
+ toml_language.name(),
+ FakeLspAdapter {
+ name: "toml2",
+ capabilities: lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: toml_legend_2,
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..lsp::SemanticTokensOptions::default()
+ },
+ ),
+ ),
+ ..lsp::ServerCapabilities::default()
+ },
+ initializer: Some(Box::new({
+ let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
+ move |fake_server| {
+ let full_counter = full_counter_toml_2_clone.clone();
+ fake_server
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _| {
+ full_counter.fetch_add(1, atomic::Ordering::Release);
+ async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+                                            // highlight '1' (the value at offset 4) as a number
+ data: vec![
+ 0, // delta_line
+ 4, // delta_start
+ 1, // length
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ ],
+ result_id: Some("a".into()),
+ },
+ )))
+ }
+ },
+ );
+ }
+ })),
+ ..FakeLspAdapter::default()
+ },
+ );
+ language_registry.add(toml_language.clone());
+
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ EditorLspTestContext::root_path(),
+ json!({
+ ".git": {},
+ "dir": {
+ "foo.toml": "a = 1\nb = 2\n",
+ }
+ }),
+ )
+ .await;
+
+ let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
+ let workspace = window.root(cx).unwrap();
+
+ let mut cx = VisualTestContext::from_window(*window.deref(), cx);
+ project
+ .update(&mut cx, |project, cx| {
+ project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
+ })
+ .await
+ .unwrap();
+ cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
+ .await;
+
+ let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
+ let toml_item = workspace
+ .update_in(&mut cx, |workspace, window, cx| {
+ workspace.open_path(toml_file, None, true, window, cx)
+ })
+ .await
+ .expect("Could not open test file");
+
+ let editor = cx.update(|_, cx| {
+ toml_item
+ .act_as::<Editor>(cx)
+ .expect("Opened test file wasn't an editor")
+ });
+
+ editor.update_in(&mut cx, |editor, window, cx| {
+ let nav_history = workspace
+ .read(cx)
+ .active_pane()
+ .read(cx)
+ .nav_history_for_item(&cx.entity());
+ editor.set_nav_history(Some(nav_history));
+ window.focus(&editor.focus_handle(cx), cx)
+ });
+
+ let _toml_server_1 = toml_server_1.next().await.unwrap();
+ let _toml_server_2 = toml_server_2.next().await.unwrap();
+
+ // Trigger semantic tokens.
+ editor.update_in(&mut cx, |editor, _, cx| {
+ editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
+ });
+ cx.executor().advance_clock(Duration::from_millis(200));
+ let task = editor.update_in(&mut cx, |e, _, _| {
+ std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
+ });
+ cx.run_until_parked();
+ task.await;
+
+ assert_eq!(
+ extract_semantic_highlights(&editor, &cx),
+ vec![
+ MultiBufferOffset(0)..MultiBufferOffset(1),
+ MultiBufferOffset(4)..MultiBufferOffset(5),
+ ]
+ );
+
+ assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
+ assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
+ }
+
+ #[gpui::test]
+ async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ update_test_language_settings(cx, |language_settings| {
+ language_settings.languages.0.insert(
+ "TOML".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..Default::default()
+ },
+ );
+ language_settings.languages.0.insert(
+ "Rust".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..Default::default()
+ },
+ );
+ });
+
+ let toml_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "TOML".into(),
+ matcher: LanguageMatcher {
+ path_suffixes: vec!["toml".into()],
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ None,
+ ));
+ let rust_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ matcher: LanguageMatcher {
+ path_suffixes: vec!["rs".into()],
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ None,
+ ));
+
+ let toml_legend = lsp::SemanticTokensLegend {
+ token_types: vec!["property".into()],
+ token_modifiers: vec![],
+ };
+ let rust_legend = lsp::SemanticTokensLegend {
+ token_types: vec!["constant".into()],
+ token_modifiers: vec![],
+ };
+
+ let app_state = cx.update(workspace::AppState::test);
+
+ cx.update(|cx| {
+ assets::Assets.load_test_fonts(cx);
+ crate::init(cx);
+ workspace::init(app_state.clone(), cx);
+ });
+
+ let project = Project::test(app_state.fs.clone(), [], cx).await;
+ let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+ let full_counter_toml = Arc::new(AtomicUsize::new(0));
+ let full_counter_toml_clone = full_counter_toml.clone();
+
+ let mut toml_server = language_registry.register_fake_lsp(
+ toml_language.name(),
+ FakeLspAdapter {
+ name: "toml",
+ capabilities: lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: toml_legend,
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..lsp::SemanticTokensOptions::default()
+ },
+ ),
+ ),
+ ..lsp::ServerCapabilities::default()
+ },
+ initializer: Some(Box::new({
+ let full_counter_toml_clone = full_counter_toml_clone.clone();
+ move |fake_server| {
+ let full_counter = full_counter_toml_clone.clone();
+ fake_server
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _| {
+ full_counter.fetch_add(1, atomic::Ordering::Release);
+ async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
+ data: vec![
+ 0, // delta_line (line 0)
+ 0, // delta_start
+ 1, // length
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ 1, // delta_line (line 1)
+ 0, // delta_start
+ 1, // length
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ 1, // delta_line (line 2)
+ 0, // delta_start
+ 1, // length
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ ],
+ result_id: Some("a".into()),
+ },
+ )))
+ }
+ },
+ );
+ }
+ })),
+ ..FakeLspAdapter::default()
+ },
+ );
+ language_registry.add(toml_language.clone());
+ let mut rust_server = language_registry.register_fake_lsp(
+ rust_language.name(),
+ FakeLspAdapter {
+ name: "rust",
+ capabilities: lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: rust_legend,
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..lsp::SemanticTokensOptions::default()
+ },
+ ),
+ ),
+ ..lsp::ServerCapabilities::default()
+ },
+ ..FakeLspAdapter::default()
+ },
+ );
+ language_registry.add(rust_language.clone());
+
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ EditorLspTestContext::root_path(),
+ json!({
+ ".git": {},
+ "dir": {
+ "foo.toml": "a = 1\nb = 2\nc = 3\n",
+ "bar.rs": "const c: usize = 3;\n",
+ }
+ }),
+ )
+ .await;
+
+ let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
+ let workspace = window.root(cx).unwrap();
+
+ let mut cx = VisualTestContext::from_window(*window.deref(), cx);
+ project
+ .update(&mut cx, |project, cx| {
+ project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
+ })
+ .await
+ .unwrap();
+ cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
+ .await;
+
+ let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
+ let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
+ let (toml_item, rust_item) = workspace.update_in(&mut cx, |workspace, window, cx| {
+ (
+ workspace.open_path(toml_file, None, true, window, cx),
+ workspace.open_path(rust_file, None, true, window, cx),
+ )
+ });
+ let toml_item = toml_item.await.expect("Could not open test file");
+ let rust_item = rust_item.await.expect("Could not open test file");
+
+ let (toml_editor, rust_editor) = cx.update(|_, cx| {
+ (
+ toml_item
+ .act_as::<Editor>(cx)
+ .expect("Opened test file wasn't an editor"),
+ rust_item
+ .act_as::<Editor>(cx)
+ .expect("Opened test file wasn't an editor"),
+ )
+ });
+ let toml_buffer = cx.read(|cx| {
+ toml_editor
+ .read(cx)
+ .buffer()
+ .read(cx)
+ .as_singleton()
+ .unwrap()
+ });
+ let rust_buffer = cx.read(|cx| {
+ rust_editor
+ .read(cx)
+ .buffer()
+ .read(cx)
+ .as_singleton()
+ .unwrap()
+ });
+ let multibuffer = cx.new(|cx| {
+ let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
+ multibuffer.push_excerpts(
+ toml_buffer.clone(),
+ [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+ cx,
+ );
+ multibuffer.push_excerpts(
+ rust_buffer.clone(),
+ [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
+ cx,
+ );
+ multibuffer
+ });
+
+ let editor = workspace.update_in(&mut cx, |workspace, window, cx| {
+ let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
+ workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
+ editor
+ });
+ editor.update_in(&mut cx, |editor, window, cx| {
+ let nav_history = workspace
+ .read(cx)
+ .active_pane()
+ .read(cx)
+ .nav_history_for_item(&cx.entity());
+ editor.set_nav_history(Some(nav_history));
+ window.focus(&editor.focus_handle(cx), cx)
+ });
+
+ let _toml_server = toml_server.next().await.unwrap();
+ let _rust_server = rust_server.next().await.unwrap();
+
+ // Initial request.
+ cx.executor().advance_clock(Duration::from_millis(200));
+ let task = editor.update_in(&mut cx, |e, _, _| {
+ std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
+ });
+ cx.run_until_parked();
+ task.await;
+ assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
+ cx.run_until_parked();
+
+ // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
+ // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
+ assert_eq!(
+ extract_semantic_highlights(&editor, &cx),
+ vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
+ );
+
+ // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
+ let toml_excerpt_id =
+ editor.read_with(&cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]);
+ editor.update_in(&mut cx, |editor, _, cx| {
+ editor.buffer().update(cx, |buffer, cx| {
+ buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx);
+ });
+ });
+
+ // Wait for semantic tokens to be re-fetched after expansion.
+ cx.executor().advance_clock(Duration::from_millis(200));
+ let task = editor.update_in(&mut cx, |e, _, _| {
+ std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
+ });
+ cx.run_until_parked();
+ task.await;
+
+ // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
+ // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
+ // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
+ assert_eq!(
+ extract_semantic_highlights(&editor, &cx),
+ vec![
+ MultiBufferOffset(0)..MultiBufferOffset(1),
+ MultiBufferOffset(6)..MultiBufferOffset(7),
+ MultiBufferOffset(12)..MultiBufferOffset(13),
+ ]
+ );
+ }
+
+ #[gpui::test]
+ async fn lsp_semantic_tokens_multibuffer_shared(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ update_test_language_settings(cx, |language_settings| {
+ language_settings.languages.0.insert(
+ "TOML".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..Default::default()
+ },
+ );
+ });
+
+ let toml_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "TOML".into(),
+ matcher: LanguageMatcher {
+ path_suffixes: vec!["toml".into()],
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ None,
+ ));
+
+ let toml_legend = lsp::SemanticTokensLegend {
+ token_types: vec!["property".into()],
+ token_modifiers: vec![],
+ };
+
+ let app_state = cx.update(workspace::AppState::test);
+
+ cx.update(|cx| {
+ assets::Assets.load_test_fonts(cx);
+ crate::init(cx);
+ workspace::init(app_state.clone(), cx);
+ });
+
+ let project = Project::test(app_state.fs.clone(), [], cx).await;
+ let language_registry = project.read_with(cx, |project, _| project.languages().clone());
+ let full_counter_toml = Arc::new(AtomicUsize::new(0));
+ let full_counter_toml_clone = full_counter_toml.clone();
+
+ let mut toml_server = language_registry.register_fake_lsp(
+ toml_language.name(),
+ FakeLspAdapter {
+ name: "toml",
+ capabilities: lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: toml_legend,
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..lsp::SemanticTokensOptions::default()
+ },
+ ),
+ ),
+ ..lsp::ServerCapabilities::default()
+ },
+ initializer: Some(Box::new({
+ let full_counter_toml_clone = full_counter_toml_clone.clone();
+ move |fake_server| {
+ let full_counter = full_counter_toml_clone.clone();
+ fake_server
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _| {
+ full_counter.fetch_add(1, atomic::Ordering::Release);
+ async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ // highlight 'a' as a property
+ data: vec![
+ 0, // delta_line
+ 0, // delta_start
+ 1, // length
+ 0, // token_type
+ 0, // token_modifiers_bitset
+ ],
+ result_id: Some("a".into()),
+ },
+ )))
+ }
+ },
+ );
+ }
+ })),
+ ..FakeLspAdapter::default()
+ },
+ );
+ language_registry.add(toml_language.clone());
+
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ EditorLspTestContext::root_path(),
+ json!({
+ ".git": {},
+ "dir": {
+ "foo.toml": "a = 1\nb = 2\n",
+ }
+ }),
+ )
+ .await;
+
+ let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
+ let workspace = window.root(cx).unwrap();
+
+ let mut cx = VisualTestContext::from_window(*window.deref(), cx);
+ project
+ .update(&mut cx, |project, cx| {
+ project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
+ })
+ .await
+ .unwrap();
+ cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
+ .await;
+
+ let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
+ let toml_item = workspace
+ .update_in(&mut cx, |workspace, window, cx| {
+ workspace.open_path(toml_file, None, true, window, cx)
+ })
+ .await
+ .expect("Could not open test file");
+
+ let toml_editor = cx.update(|_, cx| {
+ toml_item
+ .act_as::<Editor>(cx)
+ .expect("Opened test file wasn't an editor")
+ });
+ let toml_buffer = cx.read(|cx| {
+ toml_editor
+ .read(cx)
+ .buffer()
+ .read(cx)
+ .as_singleton()
+ .unwrap()
+ });
+ let multibuffer = cx.new(|cx| {
+ let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
+ multibuffer.push_excerpts(
+ toml_buffer.clone(),
+ [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
+ cx,
+ );
+ multibuffer.push_excerpts(
+ toml_buffer.clone(),
+ [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
+ cx,
+ );
+ multibuffer
+ });
+
+ let editor = workspace.update_in(&mut cx, |_, window, cx| {
+ cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx))
+ });
+ editor.update_in(&mut cx, |editor, window, cx| {
+ let nav_history = workspace
+ .read(cx)
+ .active_pane()
+ .read(cx)
+ .nav_history_for_item(&cx.entity());
+ editor.set_nav_history(Some(nav_history));
+ window.focus(&editor.focus_handle(cx), cx)
+ });
+
+ let _toml_server = toml_server.next().await.unwrap();
+
+ // Initial request.
+ cx.executor().advance_clock(Duration::from_millis(200));
+ let task = editor.update_in(&mut cx, |e, _, _| {
+ std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
+ });
+ cx.run_until_parked();
+ task.await;
+ assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
+
+ // Edit two parts of the multibuffer, which both map to the same buffer.
+ //
+ // Without debouncing, this grabs semantic tokens 4 times (twice for the
+ // toml editor, and twice for the multibuffer).
+ editor.update_in(&mut cx, |editor, _, cx| {
+ editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
+ editor.edit([(MultiBufferOffset(12)..MultiBufferOffset(13), "c")], cx);
+ });
+ cx.executor().advance_clock(Duration::from_millis(200));
+ let task = editor.update_in(&mut cx, |e, _, _| {
+ std::mem::replace(&mut e.update_semantic_tokens_task, Task::ready(()))
+ });
+ cx.run_until_parked();
+ task.await;
+ assert_eq!(
+ extract_semantic_highlights(&editor, &cx),
+ vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
+ );
+
+ assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 2);
+ }
+
+ fn extract_semantic_highlights(
+ editor: &Entity<Editor>,
+ cx: &TestAppContext,
+ ) -> Vec<Range<MultiBufferOffset>> {
+ editor.read_with(cx, |editor, cx| {
+ let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
+ editor
+ .display_map
+ .read(cx)
+ .semantic_token_highlights
+ .iter()
+ .flat_map(|(_, (v, _))| v.iter())
+ .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
+ .collect()
+ })
+ }
+
+ #[gpui::test]
+ async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
+ use gpui::{Hsla, Rgba, UpdateGlobal as _};
+ use settings::{GlobalLspSettingsContent, SemanticTokenRule};
+
+ init_test(cx, |_| {});
+
+ update_test_language_settings(cx, |language_settings| {
+ language_settings.languages.0.insert(
+ "Rust".into(),
+ LanguageSettingsContent {
+ semantic_tokens: Some(SemanticTokens::Full),
+ ..LanguageSettingsContent::default()
+ },
+ );
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ semantic_tokens_provider: Some(
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
+ lsp::SemanticTokensOptions {
+ legend: lsp::SemanticTokensLegend {
+ token_types: Vec::from(["function".into()]),
+ token_modifiers: Vec::new(),
+ },
+ full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
+ ..lsp::SemanticTokensOptions::default()
+ },
+ ),
+ ),
+ ..lsp::ServerCapabilities::default()
+ },
+ cx,
+ )
+ .await;
+
+ let mut full_request = cx
+ .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
+ move |_, _, _| {
+ async move {
+ Ok(Some(lsp::SemanticTokensResult::Tokens(
+ lsp::SemanticTokens {
+ data: vec![
+ 0, // delta_line
+ 3, // delta_start
+ 4, // length
+ 0, // token_type (function)
+ 0, // token_modifiers_bitset
+ ],
+ result_id: None,
+ },
+ )))
+ }
+ },
+ );
+
+ // Trigger initial semantic tokens fetch
+        cx.set_state("ˇfn main() {}");
+ full_request.next().await;
+ cx.run_until_parked();
+
+ // Verify initial highlights exist (with no custom color yet)
+ let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
+ assert_eq!(
+ initial_ranges,
+ vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
+ "Should have initial semantic token highlights"
+ );
+ let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
+ assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
+ // Initial color should be None or theme default (not red or blue)
+ let initial_color = initial_styles[0].color;
+
+ // Set a custom foreground color for function tokens via settings.json
+ let red_color = Rgba {
+ r: 1.0,
+ g: 0.0,
+ b: 0.0,
+ a: 1.0,
+ };
+ cx.update(|_, cx| {
+ SettingsStore::update_global(cx, |store, cx| {
+ store.update_user_settings(cx, |settings| {
+ settings.global_lsp_settings = Some(GlobalLspSettingsContent {
+ semantic_token_rules: Some(SemanticTokenRules {
+ rules: Vec::from([SemanticTokenRule {
+ token_type: Some("function".to_string()),
+ foreground_color: Some(red_color),
+ ..SemanticTokenRule::default()
+ }]),
+ }),
+ ..GlobalLspSettingsContent::default()
+ });
+ });
+ });
+ });
+
+ // Trigger a refetch by making an edit (which forces semantic tokens update)
+        cx.set_state("ˇfn main() { }");
+ full_request.next().await;
+ cx.run_until_parked();
+
+ // Verify the highlights now have the custom red color
+ let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
+ assert_eq!(
+ styles_after_settings_change.len(),
+ 1,
+ "Should still have one highlight"
+ );
+ assert_eq!(
+ styles_after_settings_change[0].color,
+ Some(Hsla::from(red_color)),
+ "Highlight should have the custom red color from settings.json"
+ );
+ assert_ne!(
+ styles_after_settings_change[0].color, initial_color,
+ "Color should have changed from initial"
+ );
+ }
+
+ fn extract_semantic_highlight_styles(
+ editor: &Entity<Editor>,
+ cx: &TestAppContext,
+ ) -> Vec<HighlightStyle> {
+ editor.read_with(cx, |editor, cx| {
+ editor
+ .display_map
+ .read(cx)
+ .semantic_token_highlights
+ .iter()
+ .flat_map(|(_, (v, interner))| {
+ v.iter().map(|highlights| interner[highlights.style])
+ })
+ .collect()
+ })
+ }
+}
@@ -488,12 +488,12 @@ impl EditorLspTestContext {
}
#[cfg(target_os = "windows")]
- fn root_path() -> &'static Path {
+ pub fn root_path() -> &'static Path {
Path::new("C:\\root")
}
#[cfg(not(target_os = "windows"))]
- fn root_path() -> &'static Path {
+ pub fn root_path() -> &'static Path {
Path::new("/root")
}
}
@@ -17,7 +17,6 @@ use multi_buffer::{Anchor, ExcerptRange, MultiBufferOffset, MultiBufferRow};
use parking_lot::RwLock;
use project::{FakeFs, Project};
use std::{
- any::TypeId,
ops::{Deref, DerefMut, Range},
path::Path,
sync::{
@@ -574,13 +573,13 @@ impl EditorTestContext {
}
#[track_caller]
- pub fn assert_editor_background_highlights<Tag: 'static>(&mut self, marked_text: &str) {
+ pub fn assert_editor_background_highlights(&mut self, key: HighlightKey, marked_text: &str) {
let expected_ranges = self.ranges(marked_text);
let actual_ranges: Vec<Range<usize>> = self.update_editor(|editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
editor
.background_highlights
- .get(&HighlightKey::Type(TypeId::of::<Tag>()))
+ .get(&key)
.map(|h| h.1.clone())
.unwrap_or_default()
.iter()
@@ -592,11 +591,11 @@ impl EditorTestContext {
}
#[track_caller]
- pub fn assert_editor_text_highlights<Tag: ?Sized + 'static>(&mut self, marked_text: &str) {
+ pub fn assert_editor_text_highlights(&mut self, key: HighlightKey, marked_text: &str) {
let expected_ranges = self.ranges(marked_text);
let snapshot = self.update_editor(|editor, window, cx| editor.snapshot(window, cx));
let actual_ranges: Vec<Range<usize>> = snapshot
- .text_highlight_ranges::<Tag>()
+ .text_highlight_ranges(key)
.map(|ranges| ranges.as_ref().clone().1)
.unwrap_or_default()
.into_iter()
@@ -9,7 +9,7 @@ use ec4rs::{
use globset::{Glob, GlobMatcher, GlobSet, GlobSetBuilder};
use gpui::{App, Modifiers, SharedString};
use itertools::{Either, Itertools};
-use settings::IntoGpui;
+use settings::{IntoGpui, SemanticTokens};
pub use settings::{
CompletionSettingsContent, EditPredictionProvider, EditPredictionsMode, FormatOnSave,
@@ -106,6 +106,8 @@ pub struct LanguageSettings {
/// - `"!<language_server_id>"` - A language server ID prefixed with a `!` will be disabled.
/// - `"..."` - A placeholder to refer to the **rest** of the registered language servers for this language.
pub language_servers: Vec<String>,
+ /// Controls how semantic tokens from language servers are used for syntax highlighting.
+ pub semantic_tokens: SemanticTokens,
/// Controls where the `editor::Rewrap` action is allowed for this language.
///
/// Note: This setting has no effect in Vim mode, as rewrap is already
@@ -590,6 +592,7 @@ impl settings::Settings for AllLanguageSettings {
jsx_tag_auto_close: settings.jsx_tag_auto_close.unwrap().enabled.unwrap(),
enable_language_server: settings.enable_language_server.unwrap(),
language_servers: settings.language_servers.unwrap(),
+ semantic_tokens: settings.semantic_tokens.unwrap(),
allow_rewrap: settings.allow_rewrap.unwrap(),
show_edit_predictions: settings.show_edit_predictions.unwrap(),
edit_predictions_disabled_in: settings.edit_predictions_disabled_in.unwrap(),
@@ -24,6 +24,7 @@ gpui.workspace = true
itertools.workspace = true
language.workspace = true
lsp.workspace = true
+menu.workspace = true
project.workspace = true
proto.workspace = true
serde_json.workspace = true
@@ -0,0 +1,1033 @@
+use editor::{
+ Anchor, Editor, ExcerptId, HighlightKey, MultiBufferSnapshot, SelectionEffects, ToPoint,
+ scroll::Autoscroll,
+};
+use gpui::{
+ Action, App, AppContext as _, Context, Corner, Div, Entity, EntityId, EventEmitter,
+ FocusHandle, Focusable, HighlightStyle, Hsla, InteractiveElement, IntoElement, MouseButton,
+ MouseDownEvent, MouseMoveEvent, ParentElement, Render, ScrollStrategy, SharedString, Styled,
+ Task, UniformListScrollHandle, WeakEntity, Window, actions, div, rems, uniform_list,
+};
+use menu::{SelectNext, SelectPrevious};
+use std::{mem, ops::Range};
+use theme::ActiveTheme;
+use ui::{
+ ButtonCommon, ButtonLike, ButtonStyle, Color, ContextMenu, FluentBuilder as _, IconButton,
+ IconName, IconPosition, IconSize, Label, LabelCommon, LabelSize, PopoverMenu,
+ PopoverMenuHandle, StyledExt, Toggleable, Tooltip, WithScrollbar, h_flex, v_flex,
+};
+use workspace::{
+ Event as WorkspaceEvent, SplitDirection, ToolbarItemEvent, ToolbarItemLocation,
+ ToolbarItemView, Workspace,
+ item::{Item, ItemHandle},
+};
+
+actions!(
+ dev,
+ [
+ /// Opens the highlights tree view for the current file.
+ OpenHighlightsTreeView,
+ ]
+);
+
+actions!(
+ highlights_tree_view,
+ [
+ /// Toggles showing text highlights.
+ ToggleTextHighlights,
+ /// Toggles showing semantic token highlights.
+ ToggleSemanticTokens,
+ ]
+);
+
+pub fn init(cx: &mut App) {
+ cx.observe_new(move |workspace: &mut Workspace, _, _| {
+ workspace.register_action(move |workspace, _: &OpenHighlightsTreeView, window, cx| {
+ let active_item = workspace.active_item(cx);
+ let workspace_handle = workspace.weak_handle();
+ let highlights_tree_view =
+ cx.new(|cx| HighlightsTreeView::new(workspace_handle, active_item, window, cx));
+ workspace.split_item(
+ SplitDirection::Right,
+ Box::new(highlights_tree_view),
+ window,
+ cx,
+ )
+ });
+ })
+ .detach();
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
+pub enum HighlightCategory {
+ Text(HighlightKey),
+ SemanticToken {
+ token_type: Option<SharedString>,
+ token_modifiers: Option<SharedString>,
+ },
+}
+
+impl HighlightCategory {
+ fn label(&self) -> SharedString {
+ match self {
+ HighlightCategory::Text(key) => format!("text: {key:?}").into(),
+ HighlightCategory::SemanticToken {
+ token_type: Some(token_type),
+ token_modifiers: Some(modifiers),
+ } => format!("semantic token: {token_type} [{modifiers}]").into(),
+ HighlightCategory::SemanticToken {
+ token_type: Some(token_type),
+ token_modifiers: None,
+ } => format!("semantic token: {token_type}").into(),
+ HighlightCategory::SemanticToken {
+ token_type: None,
+ token_modifiers: Some(modifiers),
+ } => format!("semantic token [{modifiers}]").into(),
+ HighlightCategory::SemanticToken {
+ token_type: None,
+ token_modifiers: None,
+ } => "semantic token".into(),
+ }
+ }
+}
+
+#[derive(Debug, Clone)]
+struct HighlightEntry {
+ excerpt_id: ExcerptId,
+ range: Range<Anchor>,
+ range_display: SharedString,
+ style: HighlightStyle,
+ category: HighlightCategory,
+ sort_key: (ExcerptId, u32, u32, u32, u32),
+}
+
+/// An item in the display list: either a separator between excerpts or a highlight entry.
+#[derive(Debug, Clone)]
+enum DisplayItem {
+ ExcerptSeparator {
+ label: SharedString,
+ },
+ Entry {
+ /// Index into `cached_entries`.
+ entry_ix: usize,
+ },
+}
+
+pub struct HighlightsTreeView {
+ workspace_handle: WeakEntity<Workspace>,
+ editor: Option<EditorState>,
+ list_scroll_handle: UniformListScrollHandle,
+ selected_item_ix: Option<usize>,
+ hovered_item_ix: Option<usize>,
+ focus_handle: FocusHandle,
+ cached_entries: Vec<HighlightEntry>,
+ display_items: Vec<DisplayItem>,
+ is_singleton: bool,
+ show_text_highlights: bool,
+ show_semantic_tokens: bool,
+ skip_next_scroll: bool,
+}
+
+pub struct HighlightsTreeToolbarItemView {
+ tree_view: Option<Entity<HighlightsTreeView>>,
+ _subscription: Option<gpui::Subscription>,
+ toggle_settings_handle: PopoverMenuHandle<ContextMenu>,
+}
+
+struct EditorState {
+ editor: Entity<Editor>,
+ _subscription: gpui::Subscription,
+}
+
+impl HighlightsTreeView {
+ pub fn new(
+ workspace_handle: WeakEntity<Workspace>,
+ active_item: Option<Box<dyn ItemHandle>>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Self {
+ let mut this = Self {
+ workspace_handle: workspace_handle.clone(),
+ list_scroll_handle: UniformListScrollHandle::new(),
+ editor: None,
+ hovered_item_ix: None,
+ selected_item_ix: None,
+ focus_handle: cx.focus_handle(),
+ cached_entries: Vec::new(),
+ display_items: Vec::new(),
+ is_singleton: true,
+ show_text_highlights: true,
+ show_semantic_tokens: true,
+ skip_next_scroll: false,
+ };
+
+ this.handle_item_updated(active_item, window, cx);
+
+ cx.subscribe_in(
+ &workspace_handle.upgrade().unwrap(),
+ window,
+ move |this, workspace, event, window, cx| match event {
+ WorkspaceEvent::ItemAdded { .. } | WorkspaceEvent::ActiveItemChanged => {
+ this.handle_item_updated(workspace.read(cx).active_item(cx), window, cx)
+ }
+ WorkspaceEvent::ItemRemoved { item_id } => {
+ this.handle_item_removed(item_id, window, cx);
+ }
+ _ => {}
+ },
+ )
+ .detach();
+
+ this
+ }
+
+ fn handle_item_updated(
+ &mut self,
+ active_item: Option<Box<dyn ItemHandle>>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ let Some(editor) = active_item
+ .filter(|item| item.item_id() != cx.entity_id())
+ .and_then(|item| item.act_as::<Editor>(cx))
+ else {
+ return;
+ };
+
+ let is_different_editor = self
+ .editor
+ .as_ref()
+ .is_none_or(|state| state.editor != editor);
+ if is_different_editor {
+ self.set_editor(editor, window, cx);
+ }
+ }
+
+ fn handle_item_removed(
+ &mut self,
+ item_id: &EntityId,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) {
+ if self
+ .editor
+ .as_ref()
+ .is_some_and(|state| state.editor.entity_id() == *item_id)
+ {
+ self.editor = None;
+ self.cached_entries.clear();
+ self.display_items.clear();
+ cx.notify();
+ }
+ }
+
+ fn set_editor(&mut self, editor: Entity<Editor>, window: &mut Window, cx: &mut Context<Self>) {
+ if let Some(state) = &self.editor {
+ if state.editor == editor {
+ return;
+ }
+ let key = HighlightKey::HighlightsTreeView(editor.entity_id().as_u64() as usize);
+ editor.update(cx, |editor, cx| editor.clear_background_highlights(key, cx));
+ }
+
+ let subscription =
+ cx.subscribe_in(&editor, window, |this, _, event, window, cx| match event {
+ editor::EditorEvent::Reparsed(_)
+ | editor::EditorEvent::SelectionsChanged { .. } => {
+ this.refresh_highlights(window, cx);
+ }
+ _ => return,
+ });
+
+ self.editor = Some(EditorState {
+ editor,
+ _subscription: subscription,
+ });
+ self.refresh_highlights(window, cx);
+ }
+
+ fn refresh_highlights(&mut self, _window: &mut Window, cx: &mut Context<Self>) {
+ let Some(editor_state) = self.editor.as_ref() else {
+ self.cached_entries.clear();
+ self.display_items.clear();
+ cx.notify();
+ return;
+ };
+
+ let (display_map, project, multi_buffer, cursor_position) = {
+ let editor = editor_state.editor.read(cx);
+ let cursor = editor.selections.newest_anchor().head();
+ (
+ editor.display_map.clone(),
+ editor.project().cloned(),
+ editor.buffer().clone(),
+ cursor,
+ )
+ };
+ let Some(project) = project else {
+ return;
+ };
+
+ let multi_buffer_snapshot = multi_buffer.read(cx).snapshot(cx);
+ let is_singleton = multi_buffer_snapshot.is_singleton();
+ self.is_singleton = is_singleton;
+
+ let mut entries = Vec::new();
+
+ display_map.update(cx, |display_map, cx| {
+ for (key, text_highlights) in display_map.all_text_highlights() {
+ for range in &text_highlights.1 {
+ let excerpt_id = range.start.excerpt_id;
+ let (range_display, sort_key) = format_anchor_range(
+ range,
+ excerpt_id,
+ &multi_buffer_snapshot,
+ is_singleton,
+ );
+ entries.push(HighlightEntry {
+ excerpt_id,
+ range: range.clone(),
+ range_display,
+ style: text_highlights.0,
+ category: HighlightCategory::Text(*key),
+ sort_key,
+ });
+ }
+ }
+
+ project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
+ for (buffer_id, (tokens, interner)) in display_map.all_semantic_token_highlights() {
+ let language_name = multi_buffer
+ .read(cx)
+ .buffer(*buffer_id)
+ .and_then(|buf| buf.read(cx).language().map(|l| l.name()));
+ for token in tokens.iter() {
+ let range: Range<Anchor> = token.range.start.into()..token.range.end.into();
+ let excerpt_id = range.start.excerpt_id;
+ let (range_display, sort_key) = format_anchor_range(
+ &range,
+ excerpt_id,
+ &multi_buffer_snapshot,
+ is_singleton,
+ );
+ let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
+ token.server_id,
+ language_name.as_ref(),
+ cx,
+ ) else {
+ continue;
+ };
+ entries.push(HighlightEntry {
+ excerpt_id,
+ range,
+ range_display,
+ style: interner[token.style],
+ category: HighlightCategory::SemanticToken {
+ token_type: stylizer.token_type_name(token.token_type).cloned(),
+ token_modifiers: stylizer
+ .token_modifiers(token.token_modifiers)
+ .map(SharedString::from),
+ },
+ sort_key,
+ });
+ }
+ }
+ });
+ });
+
+ entries.sort_by(|a, b| {
+ a.sort_key
+ .cmp(&b.sort_key)
+ .then_with(|| a.category.cmp(&b.category))
+ });
+ entries.dedup_by(|a, b| a.sort_key == b.sort_key && a.category == b.category);
+
+ self.cached_entries = entries;
+ self.rebuild_display_items(&multi_buffer_snapshot, cx);
+
+ if self.skip_next_scroll {
+ self.skip_next_scroll = false;
+ } else {
+ self.scroll_to_cursor_position(&cursor_position, &multi_buffer_snapshot);
+ }
+ cx.notify();
+ }
+
+ /// Rebuilds `display_items` from `cached_entries`, applying the category
+ /// filter toggles and, in multi-buffers, inserting a separator row whenever
+ /// consecutive visible entries come from different excerpts.
+ fn rebuild_display_items(&mut self, snapshot: &MultiBufferSnapshot, cx: &App) {
+ self.display_items.clear();
+
+ // Excerpt of the previously pushed entry, so a separator is emitted
+ // only at excerpt boundaries.
+ let mut last_excerpt_id: Option<ExcerptId> = None;
+
+ for (entry_ix, entry) in self.cached_entries.iter().enumerate() {
+ if !self.should_show_entry(entry) {
+ continue;
+ }
+
+ if !self.is_singleton {
+ let excerpt_changed =
+ last_excerpt_id.is_none_or(|last_id| last_id != entry.excerpt_id);
+ if excerpt_changed {
+ last_excerpt_id = Some(entry.excerpt_id);
+ let label = excerpt_label_for(entry.excerpt_id, snapshot, cx);
+ self.display_items
+ .push(DisplayItem::ExcerptSeparator { label });
+ }
+ }
+
+ // Rows reference `cached_entries` by index rather than owning data.
+ self.display_items.push(DisplayItem::Entry { entry_ix });
+ }
+ }
+
+ /// Whether `entry` passes the view's per-category filter toggles.
+ fn should_show_entry(&self, entry: &HighlightEntry) -> bool {
+ if matches!(entry.category, HighlightCategory::Text(_)) {
+ self.show_text_highlights
+ } else {
+ self.show_semantic_tokens
+ }
+ }
+
+ /// Selects and scrolls to the smallest visible entry whose range contains
+ /// the editor's cursor position. No-op when no displayed entry contains
+ /// the cursor.
+ fn scroll_to_cursor_position(&mut self, cursor: &Anchor, snapshot: &MultiBufferSnapshot) {
+ let cursor_point = cursor.to_point(snapshot);
+ let cursor_key = (cursor_point.row, cursor_point.column);
+ let cursor_excerpt = cursor.excerpt_id;
+
+ let best = self
+ .display_items
+ .iter()
+ .enumerate()
+ // Only entry rows participate; separators are skipped.
+ .filter_map(|(display_ix, item)| match item {
+ DisplayItem::Entry { entry_ix } => {
+ let entry = &self.cached_entries[*entry_ix];
+ Some((display_ix, *entry_ix, entry))
+ }
+ _ => None,
+ })
+ // Keep entries whose (row, column) span contains the cursor; in
+ // multi-buffers the entry must also be in the cursor's excerpt.
+ .filter(|(_, _, entry)| {
+ let (excerpt_id, start_row, start_col, end_row, end_col) = entry.sort_key;
+ if !self.is_singleton && excerpt_id != cursor_excerpt {
+ return false;
+ }
+ let start = (start_row, start_col);
+ let end = (end_row, end_col);
+ cursor_key >= start && cursor_key <= end
+ })
+ // Prefer the tightest containing range (fewest rows, then columns).
+ .min_by_key(|(_, _, entry)| {
+ let (_, start_row, start_col, end_row, end_col) = entry.sort_key;
+ (end_row - start_row, end_col.saturating_sub(start_col))
+ })
+ .map(|(display_ix, entry_ix, _)| (display_ix, entry_ix));
+
+ if let Some((display_ix, entry_ix)) = best {
+ self.selected_item_ix = Some(entry_ix);
+ self.list_scroll_handle
+ .scroll_to_item(display_ix, ScrollStrategy::Center);
+ }
+ }
+
+ /// Runs `f` against the attached editor with the given entry's anchor range
+ /// and this view's highlight key (derived from the view's entity id).
+ ///
+ /// Returns `None` when no editor is attached or `entry_ix` is out of range.
+ fn update_editor_with_range_for_entry(
+ &self,
+ entry_ix: usize,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ mut f: impl FnMut(&mut Editor, Range<Anchor>, usize, &mut Window, &mut Context<Editor>),
+ ) -> Option<()> {
+ let editor_state = self.editor.as_ref()?;
+ let entry = self.cached_entries.get(entry_ix)?;
+ let range = entry.range.clone();
+ // The highlight key is unique per tree view instance.
+ let key = cx.entity_id().as_u64() as usize;
+
+ editor_state.editor.update(cx, |editor, cx| {
+ f(editor, range, key, window, cx);
+ });
+ Some(())
+ }
+
+ /// Renders one highlight row: a style swatch, the range label, and the
+ /// category label, with selection and hover backgrounds.
+ fn render_entry(&self, entry: &HighlightEntry, selected: bool, cx: &App) -> Div {
+ let colors = cx.theme().colors();
+ let style_preview = render_style_preview(entry.style, cx);
+
+ h_flex()
+ .gap_1()
+ .child(style_preview)
+ .child(Label::new(entry.range_display.clone()).color(Color::Default))
+ .child(
+ Label::new(entry.category.label())
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ )
+ // Transparent background unless the row is selected.
+ .text_bg(if selected {
+ colors.element_selected
+ } else {
+ Hsla::default()
+ })
+ .pl(rems(0.5))
+ .hover(|style| style.bg(colors.element_hover))
+ }
+
+ /// Renders a multi-buffer excerpt separator row showing the excerpt's
+ /// buffer label (its path, per `excerpt_label_for`).
+ fn render_separator(&self, label: &SharedString, cx: &App) -> Div {
+ let colors = cx.theme().colors();
+ h_flex()
+ .gap_1()
+ .px(rems(0.5))
+ .bg(colors.surface_background)
+ .border_b_1()
+ .border_color(colors.border_variant)
+ .child(
+ Label::new(label.clone())
+ .size(LabelSize::Small)
+ .color(Color::Muted),
+ )
+ }
+
+ /// Produces the rendered rows for the currently visible slice of the
+ /// uniform list, wiring click-to-select and hover-to-highlight handlers
+ /// onto each entry row. Separator rows get no handlers.
+ fn compute_items(
+ &mut self,
+ visible_range: Range<usize>,
+ _window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Vec<Div> {
+ let mut items = Vec::new();
+
+ for display_ix in visible_range {
+ let Some(display_item) = self.display_items.get(display_ix) else {
+ continue;
+ };
+
+ match display_item {
+ DisplayItem::ExcerptSeparator { label } => {
+ items.push(self.render_separator(label, cx));
+ }
+ DisplayItem::Entry { entry_ix } => {
+ let entry_ix = *entry_ix;
+ let entry = &self.cached_entries[entry_ix];
+ let selected = Some(entry_ix) == self.selected_item_ix;
+ let rendered = self
+ .render_entry(entry, selected, cx)
+ // Clicking selects the row and moves the editor selection
+ // to the entry's range (reversed via `mem::swap`, matching
+ // `move_selection`); `skip_next_scroll` prevents the
+ // resulting update from re-scrolling this list.
+ .on_mouse_down(
+ MouseButton::Left,
+ cx.listener(move |tree_view, _: &MouseDownEvent, window, cx| {
+ tree_view.selected_item_ix = Some(entry_ix);
+ tree_view.skip_next_scroll = true;
+ tree_view.update_editor_with_range_for_entry(
+ entry_ix,
+ window,
+ cx,
+ |editor, mut range, _, window, cx| {
+ mem::swap(&mut range.start, &mut range.end);
+ editor.change_selections(
+ SelectionEffects::scroll(Autoscroll::newest()),
+ window,
+ cx,
+ |selections| {
+ selections.select_ranges([range]);
+ },
+ );
+ },
+ );
+ cx.notify();
+ }),
+ )
+ // Hovering highlights the entry's range in the editor,
+ // keyed by this view so the highlight can be cleared later.
+ .on_mouse_move(cx.listener(
+ move |tree_view, _: &MouseMoveEvent, window, cx| {
+ if tree_view.hovered_item_ix != Some(entry_ix) {
+ tree_view.hovered_item_ix = Some(entry_ix);
+ tree_view.update_editor_with_range_for_entry(
+ entry_ix,
+ window,
+ cx,
+ |editor, range, key, _, cx| {
+ Self::set_editor_highlights(editor, key, &[range], cx);
+ },
+ );
+ cx.notify();
+ }
+ },
+ ));
+
+ items.push(rendered);
+ }
+ }
+ }
+
+ items
+ }
+
+ /// Applies this view's background highlights to `editor` for `ranges`,
+ /// using the per-view `key` so multiple tree views do not clobber each
+ /// other's highlights.
+ fn set_editor_highlights(
+ editor: &mut Editor,
+ key: usize,
+ ranges: &[Range<Anchor>],
+ cx: &mut Context<Editor>,
+ ) {
+ editor.highlight_background_key(
+ HighlightKey::HighlightsTreeView(key),
+ ranges,
+ |_, theme| theme.colors().editor_document_highlight_write_background,
+ cx,
+ );
+ }
+
+ /// Removes all background highlights this view previously applied to
+ /// `editor` (keyed by this view's entity id).
+ fn clear_editor_highlights(editor: &Entity<Editor>, cx: &mut Context<Self>) {
+ let highlight_key = HighlightKey::HighlightsTreeView(cx.entity_id().as_u64() as usize);
+ editor.update(cx, |editor, cx| {
+ editor.clear_background_highlights(highlight_key, cx);
+ });
+ }
+
+ /// Action handler: moves the selection one entry up.
+ fn select_previous(&mut self, _: &SelectPrevious, window: &mut Window, cx: &mut Context<Self>) {
+ self.move_selection(-1, window, cx);
+ }
+
+ /// Action handler: moves the selection one entry down.
+ fn select_next(&mut self, _: &SelectNext, window: &mut Window, cx: &mut Context<Self>) {
+ self.move_selection(1, window, cx);
+ }
+
+ /// Moves the selection by `delta` entries, skipping separator rows and
+ /// clamping at both ends, then scrolls the list and moves the editor
+ /// selection to the newly selected range.
+ fn move_selection(&mut self, delta: i32, window: &mut Window, cx: &mut Context<Self>) {
+ if self.display_items.is_empty() {
+ return;
+ }
+
+ // (display index, entry index) pairs for entry rows only.
+ let entry_display_items: Vec<(usize, usize)> = self
+ .display_items
+ .iter()
+ .enumerate()
+ .filter_map(|(display_ix, item)| match item {
+ DisplayItem::Entry { entry_ix } => Some((display_ix, *entry_ix)),
+ _ => None,
+ })
+ .collect();
+
+ if entry_display_items.is_empty() {
+ return;
+ }
+
+ // Start from the current selection, or the first entry if none.
+ let current_pos = self
+ .selected_item_ix
+ .and_then(|selected| {
+ entry_display_items
+ .iter()
+ .position(|(_, entry_ix)| *entry_ix == selected)
+ })
+ .unwrap_or(0);
+
+ let new_pos = if delta < 0 {
+ current_pos.saturating_sub((-delta) as usize)
+ } else {
+ (current_pos + delta as usize).min(entry_display_items.len() - 1)
+ };
+
+ if let Some(&(display_ix, entry_ix)) = entry_display_items.get(new_pos) {
+ self.selected_item_ix = Some(entry_ix);
+ // Prevent the resulting editor update from scrolling this list again.
+ self.skip_next_scroll = true;
+ self.list_scroll_handle
+ .scroll_to_item(display_ix, ScrollStrategy::Center);
+
+ self.update_editor_with_range_for_entry(
+ entry_ix,
+ window,
+ cx,
+ |editor, mut range, _, window, cx| {
+ // Reverse the range before selecting, as in `compute_items`.
+ mem::swap(&mut range.start, &mut range.end);
+ editor.change_selections(
+ SelectionEffects::scroll(Autoscroll::newest()),
+ window,
+ cx,
+ |selections| {
+ selections.select_ranges([range]);
+ },
+ );
+ },
+ );
+
+ cx.notify();
+ }
+ }
+
+ /// Number of cached entries currently passing the filter toggles
+ /// (separator rows are not counted).
+ fn entry_count(&self) -> usize {
+ self.cached_entries
+ .iter()
+ .filter(|entry| self.should_show_entry(entry))
+ .count()
+ }
+}
+
+impl Render for HighlightsTreeView {
+ fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let display_count = self.display_items.len();
+
+ div()
+ .flex_1()
+ .track_focus(&self.focus_handle)
+ .key_context("HighlightsTreeView")
+ .on_action(cx.listener(Self::select_previous))
+ .on_action(cx.listener(Self::select_next))
+ .bg(cx.theme().colors().editor_background)
+ .map(|this| {
+ if display_count > 0 {
+ // Virtualized list of highlight rows with a scrollbar.
+ this.child(
+ uniform_list(
+ "HighlightsTreeView",
+ display_count,
+ cx.processor(move |this, range: Range<usize>, window, cx| {
+ this.compute_items(range, window, cx)
+ }),
+ )
+ .size_full()
+ .track_scroll(&self.list_scroll_handle)
+ .text_bg(cx.theme().colors().background)
+ .into_any_element(),
+ )
+ .vertical_scrollbar_for(&self.list_scroll_handle, window, cx)
+ .into_any_element()
+ } else {
+ // Empty state: explain whether entries are filtered out,
+ // absent, or there is no attached editor at all.
+ let inner_content = v_flex()
+ .items_center()
+ .text_center()
+ .gap_2()
+ .max_w_3_5()
+ .map(|this| {
+ if self.editor.is_some() {
+ let has_any = !self.cached_entries.is_empty();
+ if has_any {
+ this.child(Label::new("All highlights are filtered out"))
+ .child(
+ Label::new(
+ "Enable text or semantic highlights in the toolbar",
+ )
+ .size(LabelSize::Small),
+ )
+ } else {
+ this.child(Label::new("No highlights found")).child(
+ Label::new(
+ "The editor has no text or semantic token highlights",
+ )
+ .size(LabelSize::Small),
+ )
+ }
+ } else {
+ this.child(Label::new("Not attached to an editor")).child(
+ Label::new("Focus an editor to show highlights")
+ .size(LabelSize::Small),
+ )
+ }
+ });
+
+ this.h_flex()
+ .size_full()
+ .justify_center()
+ .child(inner_content)
+ .into_any_element()
+ }
+ })
+ }
+}
+
+impl EventEmitter<()> for HighlightsTreeView {}
+
+impl Focusable for HighlightsTreeView {
+ // Delegates focus handling to the view's own focus handle.
+ fn focus_handle(&self, _: &App) -> gpui::FocusHandle {
+ self.focus_handle.clone()
+ }
+}
+
+impl Item for HighlightsTreeView {
+ type Event = ();
+
+ fn to_item_events(_: &Self::Event, _: impl FnMut(workspace::item::ItemEvent)) {}
+
+ fn tab_content_text(&self, _detail: usize, _cx: &App) -> SharedString {
+ "Highlights".into()
+ }
+
+ fn telemetry_event_text(&self) -> Option<&'static str> {
+ None
+ }
+
+ fn can_split(&self) -> bool {
+ true
+ }
+
+ /// Splitting clones the view with the same filter toggles and re-attaches
+ /// it to the same editor, if any.
+ fn clone_on_split(
+ &self,
+ _: Option<workspace::WorkspaceId>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> Task<Option<Entity<Self>>>
+ where
+ Self: Sized,
+ {
+ Task::ready(Some(cx.new(|cx| {
+ let mut clone = Self::new(self.workspace_handle.clone(), None, window, cx);
+ clone.show_text_highlights = self.show_text_highlights;
+ clone.show_semantic_tokens = self.show_semantic_tokens;
+ clone.skip_next_scroll = false;
+ if let Some(editor) = &self.editor {
+ clone.set_editor(editor.editor.clone(), window, cx)
+ }
+ clone
+ })))
+ }
+
+ /// Drops any hover highlights this view left in the attached editor.
+ fn on_removed(&self, cx: &mut Context<Self>) {
+ if let Some(state) = self.editor.as_ref() {
+ Self::clear_editor_highlights(&state.editor, cx);
+ }
+ }
+}
+
+impl Default for HighlightsTreeToolbarItemView {
+ /// Equivalent to [`HighlightsTreeToolbarItemView::new`].
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+impl HighlightsTreeToolbarItemView {
+ /// Creates a toolbar item with no tree view attached yet; attachment
+ /// happens in `set_active_pane_item`.
+ pub fn new() -> Self {
+ Self {
+ tree_view: None,
+ _subscription: None,
+ toggle_settings_handle: PopoverMenuHandle::default(),
+ }
+ }
+
+ /// Renders the "N highlights" (or "M / N highlights" when filtered)
+ /// header, or `None` when no tree view is attached.
+ fn render_header(&self, cx: &Context<Self>) -> Option<ButtonLike> {
+ let tree_view = self.tree_view.as_ref()?;
+ let tree_view = tree_view.read(cx);
+
+ let total = tree_view.cached_entries.len();
+ let filtered = tree_view.entry_count();
+
+ let label = if filtered == total {
+ format!("{} highlights", total)
+ } else {
+ format!("{} / {} highlights", filtered, total)
+ };
+
+ Some(ButtonLike::new("highlights header").child(Label::new(label)))
+ }
+
+ /// Renders the settings popover with toggles for the two highlight
+ /// categories. The toggles act on a weak handle to the tree view so the
+ /// menu does not keep the view alive.
+ fn render_settings_button(&self, cx: &Context<Self>) -> PopoverMenu<ContextMenu> {
+ let (show_text, show_semantic) = self
+ .tree_view
+ .as_ref()
+ .map(|view| {
+ let v = view.read(cx);
+ (v.show_text_highlights, v.show_semantic_tokens)
+ })
+ .unwrap_or((true, true));
+
+ let tree_view = self.tree_view.as_ref().map(|v| v.downgrade());
+
+ PopoverMenu::new("highlights-tree-settings")
+ .trigger_with_tooltip(
+ IconButton::new("toggle-highlights-settings-icon", IconName::Sliders)
+ .icon_size(IconSize::Small)
+ .style(ButtonStyle::Subtle)
+ .toggle_state(self.toggle_settings_handle.is_deployed()),
+ Tooltip::text("Highlights Settings"),
+ )
+ .anchor(Corner::TopRight)
+ .with_handle(self.toggle_settings_handle.clone())
+ .menu(move |window, cx| {
+ let tree_view_for_text = tree_view.clone();
+ let tree_view_for_semantic = tree_view.clone();
+
+ let menu = ContextMenu::build(window, cx, move |menu, _, _| {
+ menu.toggleable_entry(
+ "Text Highlights",
+ show_text,
+ IconPosition::Start,
+ Some(ToggleTextHighlights.boxed_clone()),
+ {
+ let tree_view = tree_view_for_text.clone();
+ move |_, cx| {
+ if let Some(view) = tree_view.as_ref() {
+ view.update(cx, |view, cx| {
+ view.show_text_highlights = !view.show_text_highlights;
+ let snapshot = view.editor.as_ref().map(|s| {
+ s.editor.read(cx).buffer().read(cx).snapshot(cx)
+ });
+ if let Some(snapshot) = snapshot {
+ view.rebuild_display_items(&snapshot, cx);
+ }
+ cx.notify();
+ })
+ .ok();
+ }
+ }
+ },
+ )
+ // NOTE(review): this handler duplicates the "Text Highlights"
+ // one above except for the toggled field; consider extracting
+ // a shared helper.
+ .toggleable_entry(
+ "Semantic Tokens",
+ show_semantic,
+ IconPosition::Start,
+ Some(ToggleSemanticTokens.boxed_clone()),
+ {
+ move |_, cx| {
+ if let Some(view) = tree_view_for_semantic.as_ref() {
+ view.update(cx, |view, cx| {
+ view.show_semantic_tokens = !view.show_semantic_tokens;
+ let snapshot = view.editor.as_ref().map(|s| {
+ s.editor.read(cx).buffer().read(cx).snapshot(cx)
+ });
+ if let Some(snapshot) = snapshot {
+ view.rebuild_display_items(&snapshot, cx);
+ }
+ cx.notify();
+ })
+ .ok();
+ }
+ }
+ },
+ )
+ });
+
+ Some(menu)
+ })
+ }
+}
+
+impl Render for HighlightsTreeToolbarItemView {
+ // Header label plus the settings popover button.
+ fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ h_flex()
+ .gap_1()
+ .children(self.render_header(cx))
+ .child(self.render_settings_button(cx))
+ }
+}
+
+impl EventEmitter<ToolbarItemEvent> for HighlightsTreeToolbarItemView {}
+
+impl ToolbarItemView for HighlightsTreeToolbarItemView {
+ /// Shows the toolbar item only when the active pane item is a
+ /// `HighlightsTreeView`; observes it so the header stays up to date.
+ fn set_active_pane_item(
+ &mut self,
+ active_pane_item: Option<&dyn ItemHandle>,
+ window: &mut Window,
+ cx: &mut Context<Self>,
+ ) -> ToolbarItemLocation {
+ if let Some(item) = active_pane_item
+ && let Some(view) = item.downcast::<HighlightsTreeView>()
+ {
+ self.tree_view = Some(view.clone());
+ self._subscription = Some(cx.observe_in(&view, window, |_, _, _, cx| cx.notify()));
+ return ToolbarItemLocation::PrimaryLeft;
+ }
+ self.tree_view = None;
+ self._subscription = None;
+ ToolbarItemLocation::Hidden
+ }
+}
+
+/// Returns the separator label for an excerpt: the full path of the excerpt's
+/// underlying buffer, or "untitled" when the buffer has no file.
+fn excerpt_label_for(
+ excerpt_id: ExcerptId,
+ snapshot: &MultiBufferSnapshot,
+ cx: &App,
+) -> SharedString {
+ match snapshot
+ .buffer_for_excerpt(excerpt_id)
+ .and_then(|buffer| buffer.file())
+ {
+ Some(file) => file.full_path(cx).to_string_lossy().to_string().into(),
+ None => "untitled".to_string().into(),
+ }
+}
+
+/// Formats an anchor range as a 1-based "[row:col - row:col]" label and
+/// returns it with a sort key of 0-based coordinates.
+///
+/// Singleton buffers use multi-buffer coordinates. Multi-buffer excerpts are
+/// resolved buffer-locally when the excerpt's buffer is still available,
+/// falling back to multi-buffer coordinates otherwise. (The original had the
+/// label/sort-key construction copied verbatim in all three branches; it is
+/// deduplicated here via a local closure, output unchanged.)
+fn format_anchor_range(
+ range: &Range<Anchor>,
+ excerpt_id: ExcerptId,
+ snapshot: &MultiBufferSnapshot,
+ is_singleton: bool,
+) -> (SharedString, (ExcerptId, u32, u32, u32, u32)) {
+ // Builds the display label and sort key from 0-based (row, column) pairs.
+ let build = |start: (u32, u32), end: (u32, u32)| {
+ let display = SharedString::from(format!(
+ "[{}:{} - {}:{}]",
+ start.0 + 1,
+ start.1 + 1,
+ end.0 + 1,
+ end.1 + 1,
+ ));
+ (display, (excerpt_id, start.0, start.1, end.0, end.1))
+ };
+
+ if !is_singleton && let Some(buffer) = snapshot.buffer_for_excerpt(excerpt_id) {
+ // Multi-buffer: report coordinates local to the excerpt's buffer.
+ let start = language::ToPoint::to_point(&range.start.text_anchor, buffer);
+ let end = language::ToPoint::to_point(&range.end.text_anchor, buffer);
+ build((start.row, start.column), (end.row, end.column))
+ } else {
+ // Singleton buffer, or the excerpt's buffer is gone: multi-buffer coordinates.
+ let start = range.start.to_point(snapshot);
+ let end = range.end.to_point(snapshot);
+ build((start.row, start.column), (end.row, end.column))
+ }
+}
+
+/// Renders a compact swatch for a highlight style: a background in the style's
+/// color (foreground color preferred over background color) plus a textual
+/// summary of its attributes, or "none" when the style sets nothing.
+fn render_style_preview(style: HighlightStyle, cx: &App) -> Div {
+ let colors = cx.theme().colors();
+
+ let display_color = style.color.or(style.background_color);
+
+ let mut preview = div().px_1().rounded_sm();
+
+ if let Some(color) = display_color {
+ preview = preview.bg(color);
+ } else {
+ preview = preview.bg(colors.element_background);
+ }
+
+ let mut parts = Vec::new();
+
+ if let Some(color) = display_color {
+ parts.push(format_hsla_as_hex(color));
+ }
+ // NOTE(review): any explicit font weight is summarized as "bold" and any
+ // explicit font style as "italic", regardless of the actual value.
+ if style.font_weight.is_some() {
+ parts.push("bold".to_string());
+ }
+ if style.font_style.is_some() {
+ parts.push("italic".to_string());
+ }
+ if style.strikethrough.is_some() {
+ parts.push("strike".to_string());
+ }
+ if style.underline.is_some() {
+ parts.push("underline".to_string());
+ }
+
+ let label_text = if parts.is_empty() {
+ "none".to_string()
+ } else {
+ parts.join(" ")
+ };
+
+ preview.child(Label::new(label_text).size(LabelSize::Small))
+}
+
+/// Formats a color as an uppercase "#RRGGBB" hex string, appending the alpha
+/// byte ("#RRGGBBAA") only when the color is not fully opaque.
+fn format_hsla_as_hex(color: Hsla) -> String {
+ let rgba = color.to_rgb();
+ let byte = |channel: f32| (channel * 255.0).round() as u8;
+ let (r, g, b, a) = (byte(rgba.r), byte(rgba.g), byte(rgba.b), byte(rgba.a));
+ if a == 255 {
+ format!("#{:02X}{:02X}{:02X}", r, g, b)
+ } else {
+ format!("#{:02X}{:02X}{:02X}{:02X}", r, g, b, a)
+ }
+}
@@ -1,3 +1,4 @@
+mod highlights_tree_view;
mod key_context_view;
pub mod lsp_button;
pub mod lsp_log_view;
@@ -8,12 +9,14 @@ mod lsp_log_view_tests;
use gpui::{App, AppContext, Entity};
+pub use highlights_tree_view::{HighlightsTreeToolbarItemView, HighlightsTreeView};
pub use lsp_log_view::LspLogView;
pub use syntax_tree_view::{SyntaxTreeToolbarItemView, SyntaxTreeView};
use ui::{Context, Window};
use workspace::{Item, ItemHandle, SplitDirection, Workspace};
pub fn init(cx: &mut App) {
+ highlights_tree_view::init(cx);
lsp_log_view::init(false, cx);
syntax_tree_view::init(cx);
key_context_view::init(cx);
@@ -254,52 +254,7 @@ impl LanguageServerState {
lsp_store
.update(cx, |lsp_store, cx| {
if restart {
- let Some(workspace) = state.read(cx).workspace.upgrade() else {
- return;
- };
- let project = workspace.read(cx).project().clone();
- let path_style = project.read(cx).path_style(cx);
- let buffer_store = project.read(cx).buffer_store().clone();
- let buffers = state
- .read(cx)
- .language_servers
- .servers_per_buffer_abs_path
- .iter()
- .filter_map(|(abs_path, servers)| {
- let worktree =
- servers.worktree.as_ref()?.upgrade()?.read(cx);
- let relative_path =
- abs_path.strip_prefix(&worktree.abs_path()).ok()?;
- let relative_path =
- RelPath::new(relative_path, path_style)
- .log_err()?;
- let entry = worktree.entry_for_path(&relative_path)?;
- let project_path =
- project.read(cx).path_for_entry(entry.id, cx)?;
- buffer_store.read(cx).get_by_path(&project_path)
- })
- .collect();
- let selectors = state
- .read(cx)
- .items
- .iter()
- // Do not try to use IDs as we have stopped all servers already, when allowing to restart them all
- .flat_map(|item| match item {
- LspMenuItem::Header { .. } => None,
- LspMenuItem::ToggleServersButton { .. } => None,
- LspMenuItem::WithHealthCheck { health, .. } => Some(
- LanguageServerSelector::Name(health.name.clone()),
- ),
- LspMenuItem::WithBinaryStatus {
- server_name, ..
- } => Some(LanguageServerSelector::Name(
- server_name.clone(),
- )),
- })
- .collect();
- lsp_store.restart_language_servers_for_buffers(
- buffers, selectors, cx,
- );
+ lsp_store.restart_all_language_servers(cx);
} else {
lsp_store.stop_all_language_servers(cx);
}
@@ -1,5 +1,8 @@
use command_palette_hooks::CommandPaletteFilter;
-use editor::{Anchor, Editor, ExcerptId, MultiBufferOffset, SelectionEffects, scroll::Autoscroll};
+use editor::{
+ Anchor, Editor, ExcerptId, HighlightKey, MultiBufferOffset, SelectionEffects,
+ scroll::Autoscroll,
+};
use gpui::{
App, AppContext as _, Context, Div, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
Hsla, InteractiveElement, IntoElement, MouseButton, MouseDownEvent, MouseMoveEvent,
@@ -218,9 +221,8 @@ impl SyntaxTreeView {
if state.editor == editor {
return;
}
- editor.update(cx, |editor, cx| {
- editor.clear_background_highlights::<Self>(cx)
- });
+ let key = HighlightKey::SyntaxTreeView(cx.entity_id().as_u64() as usize);
+ editor.update(cx, |editor, cx| editor.clear_background_highlights(key, cx));
}
let subscription = cx.subscribe_in(&editor, window, |this, _, event, window, cx| {
@@ -482,8 +484,8 @@ impl SyntaxTreeView {
ranges: &[Range<Anchor>],
cx: &mut Context<Editor>,
) {
- editor.highlight_background_key::<Self>(
- key,
+ editor.highlight_background_key(
+ HighlightKey::SyntaxTreeView(key),
ranges,
|_, theme| theme.colors().editor_document_highlight_write_background,
cx,
@@ -491,9 +493,9 @@ impl SyntaxTreeView {
}
fn clear_editor_highlights(editor: &Entity<Editor>, cx: &mut Context<Self>) {
- let highlight_key = cx.entity_id().as_u64() as usize;
+ let highlight_key = HighlightKey::SyntaxTreeView(cx.entity_id().as_u64() as usize);
editor.update(cx, |editor, cx| {
- editor.clear_background_highlights_key::<Self>(highlight_key, cx);
+ editor.clear_background_highlights(highlight_key, cx);
});
}
}
@@ -5,7 +5,7 @@ use project::Fs;
use python::PyprojectTomlManifestProvider;
use rust::CargoManifestProvider;
use rust_embed::RustEmbed;
-use settings::SettingsStore;
+use settings::{SemanticTokenRules, SettingsStore};
use smol::stream::StreamExt;
use std::{str, sync::Arc};
use util::{ResultExt, asset_str};
@@ -183,12 +183,14 @@ pub fn init(languages: Arc<LanguageRegistry>, fs: Arc<dyn Fs>, node: NodeRuntime
context: Some(python_context_provider),
toolchain: Some(python_toolchain_provider),
manifest_name: Some(SharedString::new_static("pyproject.toml").into()),
+ ..Default::default()
},
LanguageInfo {
name: "rust",
adapters: vec![rust_lsp_adapter],
context: Some(rust_context_provider),
manifest_name: Some(SharedString::new_static("Cargo.toml").into()),
+ semantic_token_rules: Some(rust::semantic_token_rules()),
..Default::default()
},
LanguageInfo {
@@ -242,6 +244,8 @@ pub fn init(languages: Arc<LanguageRegistry>, fs: Arc<dyn Fs>, node: NodeRuntime
registration.context,
registration.toolchain,
registration.manifest_name,
+ registration.semantic_token_rules,
+ cx,
);
}
@@ -349,6 +353,7 @@ struct LanguageInfo {
context: Option<Arc<dyn ContextProvider>>,
toolchain: Option<Arc<dyn ToolchainLister>>,
manifest_name: Option<ManifestName>,
+ semantic_token_rules: Option<SemanticTokenRules>,
}
fn register_language(
@@ -358,8 +363,15 @@ fn register_language(
context: Option<Arc<dyn ContextProvider>>,
toolchain: Option<Arc<dyn ToolchainLister>>,
manifest_name: Option<ManifestName>,
+ semantic_token_rules: Option<SemanticTokenRules>,
+ cx: &mut App,
) {
let config = load_config(name);
+ if let Some(rules) = &semantic_token_rules {
+ SettingsStore::update_global(cx, |store, _| {
+ store.set_language_semantic_token_rules(config.name.0.clone(), rules.clone());
+ });
+ }
for adapter in adapters {
languages.register_lsp_adapter(config.name.clone(), adapter);
}
@@ -13,7 +13,7 @@ use project::lsp_store::rust_analyzer_ext::CARGO_DIAGNOSTICS_SOURCE_NAME;
use project::project_settings::ProjectSettings;
use regex::Regex;
use serde_json::json;
-use settings::Settings as _;
+use settings::{SemanticTokenRules, Settings as _};
use smallvec::SmallVec;
use smol::fs::{self};
use std::cmp::Reverse;
@@ -31,8 +31,17 @@ use util::merge_json_value_into;
use util::rel_path::RelPath;
use util::{ResultExt, maybe};
+use crate::LanguageDir;
use crate::language_settings::language_settings;
+/// Loads the built-in semantic-token styling rules for Rust from the embedded
+/// `rust/semantic_token_rules.json` asset.
+///
+/// Panics if the asset is missing, not UTF-8, or fails to parse — these are
+/// build-time invariants of the bundled language assets.
+pub(crate) fn semantic_token_rules() -> SemanticTokenRules {
+ let content = LanguageDir::get("rust/semantic_token_rules.json")
+ .expect("missing rust/semantic_token_rules.json");
+ let json = std::str::from_utf8(&content.data).expect("invalid utf-8 in semantic_token_rules");
+ settings::parse_json_with_comments::<SemanticTokenRules>(json)
+ .expect("failed to parse rust semantic_token_rules.json")
+}
+
pub struct RustLspAdapter;
#[cfg(target_os = "macos")]
@@ -0,0 +1,154 @@
+[
+ {
+ "token_type": "angle",
+ "style": ["punctuation.bracket"]
+ },
+ {
+ "token_type": "arithmetic",
+ "style": ["punctuation"]
+ },
+ {
+ "token_type": "attribute",
+ "style": ["attribute", "decorator"]
+ },
+ {
+ "token_type": "attributeBracket",
+ "style": ["punctuation.bracket"]
+ },
+ {
+ "token_type": "bitwise",
+ "style": ["operator"]
+ },
+ {
+ "token_type": "boolean",
+ "style": ["boolean"]
+ },
+ {
+ "token_type": "brace",
+ "style": ["punctuation.bracket"]
+ },
+ {
+ "token_type": "bracket",
+ "style": ["punctuation.bracket"]
+ },
+ {
+ "token_type": "builtinAttribute",
+ "style": ["attribute", "decorator"]
+ },
+ {
+ "token_type": "builtinType",
+ "style": ["type"]
+ },
+ {
+ "token_type": "character",
+ "style": ["text.literal"]
+ },
+ {
+ "token_type": "colon",
+ "style": ["punctuation"]
+ },
+ {
+ "token_type": "comma",
+ "style": ["punctuation"]
+ },
+ {
+ "token_type": "comparison",
+ "style": ["operator"]
+ },
+ {
+ "token_type": "constParameter",
+ "style": ["constant"]
+ },
+ {
+ "token_type": "const",
+ "style": ["constant"]
+ },
+ {
+ "token_type": "derive",
+ "style": ["attribute", "decorator"]
+ },
+ {
+ "token_type": "deriveHelper",
+ "style": ["attribute", "decorator"]
+ },
+ {
+ "token_type": "dot",
+ "style": ["punctuation"]
+ },
+ {
+ "token_type": "escapeSequence",
+ "style": ["string.escape"]
+ },
+ {
+ "token_type": "formatSpecifier",
+ "style": ["string.special"]
+ },
+ {
+ "token_type": "invalidEscapeSequence",
+ "style": ["string.escape"]
+ },
+ {
+ "token_type": "label",
+ "style": ["lifetime"]
+ },
+ {
+ "token_type": "lifetime",
+ "style": ["lifetime"]
+ },
+ {
+ "token_type": "logical",
+ "style": ["operator"]
+ },
+ {
+ "token_type": "macroBang",
+ "style": ["punctuation"]
+ },
+ {
+ "token_type": "parenthesis",
+ "style": ["punctuation.bracket"]
+ },
+ {
+ "token_type": "procMacro",
+ "style": ["function"]
+ },
+ {
+ "token_type": "punctuation",
+ "style": ["punctuation"]
+ },
+ {
+ "token_type": "operator",
+ "style": ["operator"]
+ },
+ {
+ "token_type": "selfKeyword",
+ "style": ["variable.special"]
+ },
+ {
+ "token_type": "selfTypeKeyword",
+ "style": ["type"]
+ },
+ {
+ "token_type": "semicolon",
+ "style": ["punctuation"]
+ },
+ {
+ "token_type": "static",
+ "style": ["constant"]
+ },
+ {
+ "token_type": "toolModule",
+ "style": ["attribute", "decorator"]
+ },
+ {
+ "token_type": "typeAlias",
+ "style": ["type"]
+ },
+ {
+ "token_type": "union",
+ "style": ["type"]
+ },
+ {
+ "token_type": "unresolvedReference",
+ "style": ["variable"]
+ }
+]
@@ -318,7 +318,7 @@ where
}
/// Combined capabilities of the server and the adapter.
-#[derive(Debug)]
+#[derive(Debug, Clone)]
pub struct AdapterServerCapabilities {
// Reported capabilities by the server
pub server_capabilities: ServerCapabilities,
@@ -326,6 +326,56 @@ pub struct AdapterServerCapabilities {
pub code_action_kinds: Option<Vec<CodeActionKind>>,
}
+// See the VSCode docs [1] and the LSP Spec [2]
+//
+// [1]: https://code.visualstudio.com/api/language-extensions/semantic-highlight-guide#standard-token-types-and-modifiers
+// [2]: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#semanticTokenTypes
+/// Semantic token types this client advertises support for in its
+/// `SemanticTokensClientCapabilities`.
+pub const SEMANTIC_TOKEN_TYPES: &[SemanticTokenType] = &[
+ SemanticTokenType::NAMESPACE,
+ SemanticTokenType::CLASS,
+ SemanticTokenType::ENUM,
+ SemanticTokenType::INTERFACE,
+ SemanticTokenType::STRUCT,
+ SemanticTokenType::TYPE_PARAMETER,
+ SemanticTokenType::TYPE,
+ SemanticTokenType::PARAMETER,
+ SemanticTokenType::VARIABLE,
+ SemanticTokenType::PROPERTY,
+ SemanticTokenType::ENUM_MEMBER,
+ SemanticTokenType::DECORATOR,
+ SemanticTokenType::FUNCTION,
+ SemanticTokenType::METHOD,
+ SemanticTokenType::MACRO,
+ SemanticTokenType::new("label"), // Not in the spec, but in the docs.
+ SemanticTokenType::COMMENT,
+ SemanticTokenType::STRING,
+ SemanticTokenType::KEYWORD,
+ SemanticTokenType::NUMBER,
+ SemanticTokenType::REGEXP,
+ SemanticTokenType::OPERATOR,
+ SemanticTokenType::MODIFIER, // Only in the spec, not in the docs.
+ // Language specific things below.
+ // C#
+ SemanticTokenType::EVENT,
+ // Rust
+ SemanticTokenType::new("lifetime"),
+];
+/// Semantic token modifiers this client advertises support for in its
+/// `SemanticTokensClientCapabilities`.
+pub const SEMANTIC_TOKEN_MODIFIERS: &[SemanticTokenModifier] = &[
+ SemanticTokenModifier::DECLARATION,
+ SemanticTokenModifier::DEFINITION,
+ SemanticTokenModifier::READONLY,
+ SemanticTokenModifier::STATIC,
+ SemanticTokenModifier::DEPRECATED,
+ SemanticTokenModifier::ABSTRACT,
+ SemanticTokenModifier::ASYNC,
+ SemanticTokenModifier::MODIFICATION,
+ SemanticTokenModifier::DOCUMENTATION,
+ SemanticTokenModifier::DEFAULT_LIBRARY,
+ // Language specific things below.
+ // Rust
+ SemanticTokenModifier::new("constant"),
+];
+
impl LanguageServer {
/// Starts a language server process.
pub fn new(
@@ -659,7 +709,12 @@ impl LanguageServer {
Ok(())
}
- pub fn default_initialize_params(&self, pull_diagnostics: bool, cx: &App) -> InitializeParams {
+ pub fn default_initialize_params(
+ &self,
+ pull_diagnostics: bool,
+ augments_syntax_tokens: bool,
+ cx: &App,
+ ) -> InitializeParams {
let workspace_folders = self.workspace_folders.as_ref().map_or_else(
|| {
vec![WorkspaceFolder {
@@ -736,6 +791,9 @@ impl LanguageServer {
execute_command: Some(ExecuteCommandClientCapabilities {
dynamic_registration: Some(true),
}),
+ semantic_tokens: Some(SemanticTokensWorkspaceClientCapabilities {
+ refresh_support: Some(true),
+ }),
..WorkspaceClientCapabilities::default()
}),
text_document: Some(TextDocumentClientCapabilities {
@@ -836,6 +894,20 @@ impl LanguageServer {
}),
dynamic_registration: Some(true),
}),
+ semantic_tokens: Some(SemanticTokensClientCapabilities {
+ dynamic_registration: Some(false),
+ requests: SemanticTokensClientCapabilitiesRequests {
+ range: None,
+ full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
+ },
+ token_types: SEMANTIC_TOKEN_TYPES.to_vec(),
+ token_modifiers: SEMANTIC_TOKEN_MODIFIERS.to_vec(),
+ formats: vec![TokenFormat::RELATIVE],
+ overlapping_token_support: Some(true),
+ multiline_token_support: Some(true),
+ server_cancel_support: Some(true),
+ augments_syntax_tokens: Some(augments_syntax_tokens),
+ }),
publish_diagnostics: Some(PublishDiagnosticsClientCapabilities {
related_information: Some(true),
version_support: Some(true),
@@ -1919,7 +1991,7 @@ mod tests {
let server = cx
.update(|cx| {
- let params = server.default_initialize_params(false, cx);
+ let params = server.default_initialize_params(false, false, cx);
let configuration = DidChangeConfigurationParams {
settings: Default::default(),
};
@@ -8,10 +8,22 @@ use std::{
};
use sum_tree::Bias;
+/// A stable reference to a position within a [`MultiBuffer`](super::MultiBuffer).
+///
+/// Unlike simple offsets, anchors remain valid as the text is edited, automatically
+/// adjusting to reflect insertions and deletions around them.
#[derive(Clone, Copy, Eq, PartialEq, Hash)]
pub struct Anchor {
+ /// Identifies which excerpt within the multi-buffer this anchor belongs to.
+ /// A multi-buffer can contain multiple excerpts from different buffers.
pub excerpt_id: ExcerptId,
+ /// The position within the excerpt's underlying buffer. This is a stable
+ /// reference that remains valid as the buffer text is edited.
pub text_anchor: text::Anchor,
+ /// When present, indicates this anchor points into deleted text within an
+ /// expanded diff hunk. The anchor references a position in the diff base
+ /// (original) text rather than the current buffer text. This is used when
+ /// displaying inline diffs where deleted lines are shown.
pub diff_base_anchor: Option<text::Anchor>,
}
@@ -257,3 +269,230 @@ impl AnchorRangeExt for Range<Anchor> {
self.start.to_point(content)..self.end.to_point(content)
}
}
+
+/// An [`Anchor`] without a diff base anchor.
+///
+/// The main benefit of this type is that it is almost half the size of a full anchor.
+/// Store this if you know you are never working with diff base anchors.
+#[derive(Clone, Copy, Eq, PartialEq, Hash)]
+pub struct DiffbaselessAnchor {
+ /// Identifies which excerpt within the multi-buffer this anchor belongs to.
+ /// A multi-buffer can contain multiple excerpts from different buffers.
+ pub excerpt_id: ExcerptId,
+ /// The position within the excerpt's underlying buffer. This is a stable
+ /// reference that remains valid as the buffer text is edited.
+ pub text_anchor: text::Anchor,
+}
+
+impl std::fmt::Debug for DiffbaselessAnchor {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ if self.is_min() {
+ return write!(f, "DiffbaselessAnchor::min({:?})", self.text_anchor.buffer_id);
+ }
+ if self.is_max() {
+ return write!(f, "DiffbaselessAnchor::max({:?})", self.text_anchor.buffer_id);
+ }
+
+ f.debug_struct("DiffbaselessAnchor")
+ .field("excerpt_id", &self.excerpt_id)
+ .field("text_anchor", &self.text_anchor)
+ .finish()
+ }
+}
+
+impl DiffbaselessAnchor {
+ pub fn in_buffer(excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Self {
+ Self {
+ excerpt_id,
+ text_anchor,
+ }
+ }
+
+ pub fn range_in_buffer(excerpt_id: ExcerptId, range: Range<text::Anchor>) -> Range<Self> {
+ Self::in_buffer(excerpt_id, range.start)..Self::in_buffer(excerpt_id, range.end)
+ }
+
+ pub fn min() -> Self {
+ Self {
+ excerpt_id: ExcerptId::min(),
+ text_anchor: text::Anchor::MIN,
+ }
+ }
+
+ pub fn max() -> Self {
+ Self {
+ excerpt_id: ExcerptId::max(),
+ text_anchor: text::Anchor::MAX,
+ }
+ }
+
+ pub fn is_min(&self) -> bool {
+ self.excerpt_id == ExcerptId::min() && self.text_anchor.is_min()
+ }
+
+ pub fn is_max(&self) -> bool {
+ self.excerpt_id == ExcerptId::max() && self.text_anchor.is_max()
+ }
+
+ pub fn cmp(&self, other: &DiffbaselessAnchor, snapshot: &MultiBufferSnapshot) -> Ordering {
+ if self == other {
+ return Ordering::Equal;
+ }
+
+ let self_excerpt_id = snapshot.latest_excerpt_id(self.excerpt_id);
+ let other_excerpt_id = snapshot.latest_excerpt_id(other.excerpt_id);
+
+ let excerpt_id_cmp = self_excerpt_id.cmp(&other_excerpt_id, snapshot);
+ if excerpt_id_cmp.is_ne() {
+ return excerpt_id_cmp;
+ }
+ if self_excerpt_id == ExcerptId::max()
+ && self.text_anchor.is_max()
+ && other.text_anchor.is_max()
+ {
+ return Ordering::Equal;
+ }
+ if let Some(excerpt) = snapshot.excerpt(self_excerpt_id) {
+ let text_cmp = self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer);
+ if text_cmp.is_ne() {
+ return text_cmp;
+ }
+ }
+ Ordering::Equal
+ }
+
+ pub fn bias(&self) -> Bias {
+ self.text_anchor.bias
+ }
+
+ pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> DiffbaselessAnchor {
+ if self.text_anchor.bias != Bias::Left
+ && let Some(excerpt) = snapshot.excerpt(self.excerpt_id)
+ {
+ return Self {
+ excerpt_id: excerpt.id,
+ text_anchor: self.text_anchor.bias_left(&excerpt.buffer),
+ };
+ }
+ *self
+ }
+
+ pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> DiffbaselessAnchor {
+ if self.text_anchor.bias != Bias::Right
+ && let Some(excerpt) = snapshot.excerpt(self.excerpt_id)
+ {
+ return Self {
+ excerpt_id: excerpt.id,
+ text_anchor: self.text_anchor.bias_right(&excerpt.buffer),
+ };
+ }
+ *self
+ }
+
+ pub fn summary<D>(&self, snapshot: &MultiBufferSnapshot) -> D
+ where
+ D: MultiBufferDimension
+ + Ord
+ + Sub<Output = D::TextDimension>
+ + Sub<D::TextDimension, Output = D>
+ + AddAssign<D::TextDimension>
+ + Add<D::TextDimension, Output = D>,
+ D::TextDimension: Sub<Output = D::TextDimension> + Ord,
+ {
+ snapshot.summary_for_anchor(&Anchor {
+ excerpt_id: self.excerpt_id,
+ text_anchor: self.text_anchor,
+ diff_base_anchor: None,
+ })
+ }
+
+ pub fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool {
+ if self.is_min() || self.is_max() {
+ true
+ } else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
+ (self.text_anchor == excerpt.range.context.start
+ || self.text_anchor == excerpt.range.context.end
+ || self.text_anchor.is_valid(&excerpt.buffer))
+ && excerpt.contains_diffbaseless(self)
+ } else {
+ false
+ }
+ }
+}
+
+impl ToOffset for DiffbaselessAnchor {
+ fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffset {
+ self.summary(snapshot)
+ }
+ fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> MultiBufferOffsetUtf16 {
+ self.summary(snapshot)
+ }
+}
+
+impl ToPoint for DiffbaselessAnchor {
+ fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point {
+ self.summary(snapshot)
+ }
+ fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> rope::PointUtf16 {
+ self.summary(snapshot)
+ }
+}
+
+pub trait DiffbaselessAnchorRangeExt {
+ fn cmp(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> Ordering;
+ fn includes(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> bool;
+ fn overlaps(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> bool;
+ fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<MultiBufferOffset>;
+ fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point>;
+}
+
+impl DiffbaselessAnchorRangeExt for Range<DiffbaselessAnchor> {
+ fn cmp(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> Ordering {
+ match self.start.cmp(&other.start, buffer) {
+ Ordering::Equal => other.end.cmp(&self.end, buffer),
+ ord => ord,
+ }
+ }
+
+ fn includes(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> bool {
+ self.start.cmp(&other.start, buffer).is_le() && other.end.cmp(&self.end, buffer).is_le()
+ }
+
+ fn overlaps(&self, other: &Range<DiffbaselessAnchor>, buffer: &MultiBufferSnapshot) -> bool {
+ self.end.cmp(&other.start, buffer).is_ge() && self.start.cmp(&other.end, buffer).is_le()
+ }
+
+ fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<MultiBufferOffset> {
+ self.start.to_offset(content)..self.end.to_offset(content)
+ }
+
+ fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point> {
+ self.start.to_point(content)..self.end.to_point(content)
+ }
+}
+
+pub struct AnchorHasDiffbaseError;
+
+impl TryFrom<Anchor> for DiffbaselessAnchor {
+ type Error = AnchorHasDiffbaseError;
+
+ fn try_from(anchor: Anchor) -> Result<Self, AnchorHasDiffbaseError> {
+ if anchor.diff_base_anchor.is_some() {
+ return Err(AnchorHasDiffbaseError);
+ }
+ Ok(DiffbaselessAnchor {
+ excerpt_id: anchor.excerpt_id,
+ text_anchor: anchor.text_anchor,
+ })
+ }
+}
+
+impl From<DiffbaselessAnchor> for Anchor {
+ fn from(diffbaseless: DiffbaselessAnchor) -> Self {
+ Anchor {
+ excerpt_id: diffbaseless.excerpt_id,
+ text_anchor: diffbaseless.text_anchor,
+ diff_base_anchor: None,
+ }
+ }
+}
@@ -6,7 +6,9 @@ mod transaction;
use self::transaction::History;
-pub use anchor::{Anchor, AnchorRangeExt};
+pub use anchor::{
+ Anchor, AnchorHasDiffbaseError, AnchorRangeExt, DiffbaselessAnchor, DiffbaselessAnchorRangeExt,
+};
use anyhow::{Result, anyhow};
use buffer_diff::{
@@ -7299,6 +7301,23 @@ impl Excerpt {
.is_ge()
}
+ fn contains_diffbaseless(&self, anchor: &DiffbaselessAnchor) -> bool {
+ (anchor.text_anchor.buffer_id.is_none()
+ || anchor.text_anchor.buffer_id == Some(self.buffer_id))
+ && self
+ .range
+ .context
+ .start
+ .cmp(&anchor.text_anchor, &self.buffer)
+ .is_le()
+ && self
+ .range
+ .context
+ .end
+ .cmp(&anchor.text_anchor, &self.buffer)
+ .is_ge()
+ }
+
/// The [`Excerpt`]'s start offset in its [`Buffer`]
fn buffer_start_offset(&self) -> BufferOffset {
BufferOffset(self.range.context.start.to_offset(&self.buffer))
@@ -7427,6 +7446,12 @@ impl<'a> MultiBufferExcerpt<'a> {
&& range.end <= self.excerpt.buffer_end_offset()
}
+ /// Returns true if any part of the given range is in the buffer's excerpt
+ pub fn contains_partial_buffer_range(&self, range: Range<BufferOffset>) -> bool {
+ range.start <= self.excerpt.buffer_end_offset()
+ && range.end >= self.excerpt.buffer_start_offset()
+ }
+
pub fn max_buffer_row(&self) -> u32 {
self.excerpt.max_buffer_row
}
@@ -324,7 +324,7 @@ impl Prettier {
let server = cx
.update(|cx| {
- let params = server.default_initialize_params(false, cx);
+ let params = server.default_initialize_params(false, false, cx);
let configuration = lsp::DidChangeConfigurationParams {
settings: Default::default(),
};
@@ -14,7 +14,7 @@ use client::proto::{self, PeerId};
use clock::Global;
use collections::HashMap;
use futures::future;
-use gpui::{App, AsyncApp, Entity, SharedString, Task};
+use gpui::{App, AsyncApp, Entity, SharedString, Task, prelude::FluentBuilder};
use language::{
Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind, CharScopeContext,
OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction, Unclipped,
@@ -251,6 +251,44 @@ pub(crate) struct InlayHints {
pub range: Range<Anchor>,
}
+#[derive(Debug, Clone, Copy)]
+pub(crate) struct SemanticTokensFull {
+ pub for_server: Option<LanguageServerId>,
+}
+
+#[derive(Debug, Clone)]
+pub(crate) struct SemanticTokensDelta {
+ pub previous_result_id: SharedString,
+}
+
+#[derive(Debug)]
+pub(crate) enum SemanticTokensResponse {
+ Full {
+ data: Vec<u32>,
+ result_id: Option<SharedString>,
+ },
+ Delta {
+ edits: Vec<SemanticTokensEdit>,
+ result_id: Option<SharedString>,
+ },
+}
+
+impl Default for SemanticTokensResponse {
+ fn default() -> Self {
+ Self::Delta {
+ edits: Vec::new(),
+ result_id: None,
+ }
+ }
+}
+
+#[derive(Debug)]
+pub(crate) struct SemanticTokensEdit {
+ pub start: u32,
+ pub delete_count: u32,
+ pub data: Vec<u32>,
+}
+
#[derive(Debug, Copy, Clone)]
pub(crate) struct GetCodeLens;
@@ -277,7 +315,7 @@ pub struct GetDocumentDiagnostics {
/// We cannot blindly rely on server's capabilities.diagnostic_provider, as they're a singular field, whereas
/// a server can register multiple diagnostic providers post-mortem.
pub registration_id: Option<SharedString>,
- pub identifier: Option<String>,
+ pub identifier: Option<SharedString>,
pub previous_result_id: Option<SharedString>,
}
@@ -3469,6 +3507,310 @@ impl LspCommand for InlayHints {
}
}
+#[async_trait(?Send)]
+impl LspCommand for SemanticTokensFull {
+ type Response = SemanticTokensResponse;
+ type LspRequest = lsp::SemanticTokensFullRequest;
+ type ProtoRequest = proto::SemanticTokens;
+
+ fn display_name(&self) -> &str {
+ "Semantic tokens full"
+ }
+
+ fn check_capabilities(&self, capabilities: AdapterServerCapabilities) -> bool {
+ capabilities
+ .server_capabilities
+ .semantic_tokens_provider
+ .as_ref()
+ .is_some_and(|semantic_tokens_provider| {
+ let options = match semantic_tokens_provider {
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(opts) => opts,
+ lsp::SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
+ opts,
+ ) => &opts.semantic_tokens_options,
+ };
+
+ match options.full {
+ Some(lsp::SemanticTokensFullOptions::Bool(is_supported)) => is_supported,
+ Some(lsp::SemanticTokensFullOptions::Delta { .. }) => true,
+ None => false,
+ }
+ })
+ }
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ _: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &App,
+ ) -> Result<lsp::SemanticTokensParams> {
+ Ok(lsp::SemanticTokensParams {
+ text_document: lsp::TextDocumentIdentifier {
+ uri: file_path_to_lsp_url(path)?,
+ },
+ partial_result_params: Default::default(),
+ work_done_progress_params: Default::default(),
+ })
+ }
+
+ async fn response_from_lsp(
+ self,
+ message: Option<lsp::SemanticTokensResult>,
+ _: Entity<LspStore>,
+ _: Entity<Buffer>,
+ _: LanguageServerId,
+ _: AsyncApp,
+ ) -> anyhow::Result<SemanticTokensResponse> {
+ match message {
+ Some(lsp::SemanticTokensResult::Tokens(tokens)) => Ok(SemanticTokensResponse::Full {
+ data: tokens.data,
+ result_id: tokens.result_id.map(SharedString::new),
+ }),
+ Some(lsp::SemanticTokensResult::Partial(_)) => {
+ anyhow::bail!(
+ "Unexpected partial result in semantic tokens full response"
+ )
+ }
+ None => Ok(Default::default()),
+ }
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::SemanticTokens {
+ proto::SemanticTokens {
+ project_id,
+ buffer_id: buffer.remote_id().into(),
+ version: serialize_version(&buffer.version()),
+ for_server: self.for_server.map(|id| id.to_proto()),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::SemanticTokens,
+ _: Entity<LspStore>,
+ buffer: Entity<Buffer>,
+ mut cx: AsyncApp,
+ ) -> Result<Self> {
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+
+ Ok(Self {
+ for_server: message
+ .for_server
+ .map(LanguageServerId::from_proto),
+ })
+ }
+
+ fn response_to_proto(
+ response: SemanticTokensResponse,
+ _: &mut LspStore,
+ _: PeerId,
+ buffer_version: &clock::Global,
+ _: &mut App,
+ ) -> proto::SemanticTokensResponse {
+ match response {
+ SemanticTokensResponse::Full { data, result_id } => proto::SemanticTokensResponse {
+ data,
+ edits: Vec::new(),
+ result_id: result_id.map(|s| s.to_string()),
+ version: serialize_version(buffer_version),
+ },
+ SemanticTokensResponse::Delta { edits, result_id } => proto::SemanticTokensResponse {
+ data: Vec::new(),
+ edits: edits
+ .into_iter()
+ .map(|edit| proto::SemanticTokensEdit {
+ start: edit.start,
+ delete_count: edit.delete_count,
+ data: edit.data,
+ })
+ .collect(),
+ result_id: result_id.map(|s| s.to_string()),
+ version: serialize_version(buffer_version),
+ },
+ }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::SemanticTokensResponse,
+ _: Entity<LspStore>,
+ buffer: Entity<Buffer>,
+ mut cx: AsyncApp,
+ ) -> anyhow::Result<SemanticTokensResponse> {
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+
+ Ok(SemanticTokensResponse::Full {
+ data: message.data,
+ result_id: message.result_id.map(SharedString::new),
+ })
+ }
+
+ fn buffer_id_from_proto(message: &proto::SemanticTokens) -> Result<BufferId> {
+ BufferId::new(message.buffer_id)
+ }
+}
+
+#[async_trait(?Send)]
+impl LspCommand for SemanticTokensDelta {
+ type Response = SemanticTokensResponse;
+ type LspRequest = lsp::SemanticTokensFullDeltaRequest;
+ type ProtoRequest = proto::SemanticTokens;
+
+ fn display_name(&self) -> &str {
+ "Semantic tokens delta"
+ }
+
+ fn check_capabilities(&self, capabilities: AdapterServerCapabilities) -> bool {
+ capabilities
+ .server_capabilities
+ .semantic_tokens_provider
+ .as_ref()
+ .is_some_and(|semantic_tokens_provider| {
+ let options = match semantic_tokens_provider {
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(opts) => opts,
+ lsp::SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
+ opts,
+ ) => &opts.semantic_tokens_options,
+ };
+
+ match options.full {
+ Some(lsp::SemanticTokensFullOptions::Delta { delta }) => delta.unwrap_or(false),
+ // `full: true` (instead of `full: { delta: true }`) means no support for delta.
+ _ => false,
+ }
+ })
+ }
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ _: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &App,
+ ) -> Result<lsp::SemanticTokensDeltaParams> {
+ Ok(lsp::SemanticTokensDeltaParams {
+ text_document: lsp::TextDocumentIdentifier {
+ uri: file_path_to_lsp_url(path)?,
+ },
+ previous_result_id: self.previous_result_id.clone().map(|s| s.to_string()),
+ partial_result_params: Default::default(),
+ work_done_progress_params: Default::default(),
+ })
+ }
+
+ async fn response_from_lsp(
+ self,
+ message: Option<lsp::SemanticTokensFullDeltaResult>,
+ _: Entity<LspStore>,
+ _: Entity<Buffer>,
+ _: LanguageServerId,
+ _: AsyncApp,
+ ) -> anyhow::Result<SemanticTokensResponse> {
+ match message {
+ Some(lsp::SemanticTokensFullDeltaResult::Tokens(tokens)) => {
+ Ok(SemanticTokensResponse::Full {
+ data: tokens.data,
+ result_id: tokens.result_id.map(SharedString::new),
+ })
+ }
+ Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(delta)) => {
+ Ok(SemanticTokensResponse::Delta {
+ edits: delta
+ .edits
+ .into_iter()
+ .map(|e| SemanticTokensEdit {
+ start: e.start,
+ delete_count: e.delete_count,
+ data: e.data.unwrap_or_default(),
+ })
+ .collect(),
+ result_id: delta.result_id.map(SharedString::new),
+ })
+ }
+ Some(lsp::SemanticTokensFullDeltaResult::PartialTokensDelta { .. }) => {
+ anyhow::bail!(
+ "Unexpected partial result in semantic tokens delta response"
+ )
+ }
+ None => Ok(Default::default()),
+ }
+ }
+
+ fn to_proto(&self, _: u64, _: &Buffer) -> proto::SemanticTokens {
+ unimplemented!("Delta requests are never initiated on the remote client side")
+ }
+
+ async fn from_proto(
+ _: proto::SemanticTokens,
+ _: Entity<LspStore>,
+ _: Entity<Buffer>,
+ _: AsyncApp,
+ ) -> Result<Self> {
+ unimplemented!("Delta requests are never initiated on the remote client side")
+ }
+
+ fn response_to_proto(
+ response: SemanticTokensResponse,
+ _: &mut LspStore,
+ _: PeerId,
+ buffer_version: &clock::Global,
+ _: &mut App,
+ ) -> proto::SemanticTokensResponse {
+ match response {
+ SemanticTokensResponse::Full { data, result_id } => proto::SemanticTokensResponse {
+ data,
+ edits: Vec::new(),
+ result_id: result_id.map(|s| s.to_string()),
+ version: serialize_version(buffer_version),
+ },
+ SemanticTokensResponse::Delta { edits, result_id } => proto::SemanticTokensResponse {
+ data: Vec::new(),
+ edits: edits
+ .into_iter()
+ .map(|edit| proto::SemanticTokensEdit {
+ start: edit.start,
+ delete_count: edit.delete_count,
+ data: edit.data,
+ })
+ .collect(),
+ result_id: result_id.map(|s| s.to_string()),
+ version: serialize_version(buffer_version),
+ },
+ }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::SemanticTokensResponse,
+ _: Entity<LspStore>,
+ buffer: Entity<Buffer>,
+ mut cx: AsyncApp,
+ ) -> anyhow::Result<SemanticTokensResponse> {
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+
+ Ok(SemanticTokensResponse::Full {
+ data: message.data,
+ result_id: message.result_id.map(SharedString::new),
+ })
+ }
+
+ fn buffer_id_from_proto(message: &proto::SemanticTokens) -> Result<BufferId> {
+ BufferId::new(message.buffer_id)
+ }
+}
+
#[async_trait(?Send)]
impl LspCommand for GetCodeLens {
type Response = Vec<CodeAction>;
@@ -4080,8 +4422,8 @@ impl LspCommand for GetDocumentDiagnostics {
text_document: lsp::TextDocumentIdentifier {
uri: file_path_to_lsp_url(path)?,
},
- identifier: self.identifier.clone(),
- previous_result_id: self.previous_result_id.clone().map(|id| id.to_string()),
+ identifier: self.identifier.as_ref().map(ToString::to_string),
+ previous_result_id: self.previous_result_id.as_ref().map(ToString::to_string),
partial_result_params: Default::default(),
work_done_progress_params: Default::default(),
})
@@ -14,6 +14,7 @@ pub mod json_language_server_ext;
pub mod log_store;
pub mod lsp_ext_command;
pub mod rust_analyzer_ext;
+mod semantic_tokens;
pub mod vue_language_server_ext;
mod inlay_hint_cache;
@@ -30,6 +31,7 @@ use crate::{
lsp_store::{
self,
log_store::{GlobalLogStore, LanguageServerKind},
+ semantic_tokens::SemanticTokensData,
},
manifest_tree::{
LanguageServerTree, LanguageServerTreeNode, LaunchDisposition, ManifestQueryDelegate,
@@ -66,7 +68,10 @@ use language::{
LanguageName, LanguageRegistry, LocalFile, LspAdapter, LspAdapterDelegate, LspInstaller,
ManifestDelegate, ManifestName, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16,
Toolchain, Transaction, Unclipped,
- language_settings::{FormatOnSave, Formatter, LanguageSettings, language_settings},
+ language_settings::{
+ AllLanguageSettings, FormatOnSave, Formatter, LanguageSettings, all_language_settings,
+ language_settings,
+ },
point_to_lsp,
proto::{
deserialize_anchor, deserialize_anchor_range, deserialize_lsp_edit, deserialize_version,
@@ -136,6 +141,10 @@ pub use language::Location;
pub use lsp_store::inlay_hint_cache::{CacheInlayHints, InvalidationStrategy};
#[cfg(any(test, feature = "test-support"))]
pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
+pub use semantic_tokens::{
+ BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer, TokenType,
+};
+use settings::SemanticTokenRules;
pub use worktree::{
Entry, EntryKind, FS_WATCH_LATENCY, File, LocalWorktree, PathChange, ProjectEntryId,
UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings,
@@ -205,7 +214,7 @@ pub enum LspFormatTarget {
Ranges(BTreeMap<BufferId, Vec<Range<Anchor>>>),
}
-#[derive(Clone, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct OpenLspBufferHandle(Entity<OpenLspBuffer>);
struct OpenLspBuffer(Entity<Buffer>);
@@ -364,6 +373,7 @@ impl LocalLspStore {
adapter,
disposition.settings.clone(),
key.clone(),
+ language_name.clone(),
cx,
);
if let Some(state) = self.language_server_ids.get_mut(&key) {
@@ -385,6 +395,7 @@ impl LocalLspStore {
adapter: Arc<CachedLspAdapter>,
settings: Arc<LspSettings>,
key: LanguageServerSeed,
+ language_name: LanguageName,
cx: &mut App,
) -> LanguageServerId {
let worktree = worktree_handle.read(cx);
@@ -500,11 +511,18 @@ impl LocalLspStore {
let adapter = adapter.clone();
let lsp_store = self.weak.clone();
let pending_workspace_folders = pending_workspace_folders.clone();
-
let pull_diagnostics = ProjectSettings::get_global(cx)
.diagnostics
.lsp_pull_diagnostics
.enabled;
+ let settings_location = SettingsLocation {
+ worktree_id,
+ path: RelPath::empty(),
+ };
+ let augments_syntax_tokens = AllLanguageSettings::get(Some(settings_location), cx)
+ .language(Some(settings_location), Some(&language_name), cx)
+ .semantic_tokens
+ .use_tree_sitter();
cx.spawn(async move |cx| {
let result = async {
let language_server = pending_server.await?;
@@ -533,8 +551,11 @@ impl LocalLspStore {
}
let initialization_params = cx.update(|cx| {
- let mut params =
- language_server.default_initialize_params(pull_diagnostics, cx);
+ let mut params = language_server.default_initialize_params(
+ pull_diagnostics,
+ augments_syntax_tokens,
+ cx,
+ );
params.initialization_options = initialization_options;
adapter.adapter.prepare_initialize_params(params, cx)
})?;
@@ -1070,6 +1091,41 @@ impl LocalLspStore {
})
.detach();
+ language_server
+ .on_request::<lsp::request::SemanticTokensRefresh, _, _>({
+ let lsp_store = lsp_store.clone();
+ let request_id = Arc::new(AtomicUsize::new(0));
+ move |(), cx| {
+ let lsp_store = lsp_store.clone();
+ let request_id = request_id.clone();
+ let mut cx = cx.clone();
+ async move {
+ lsp_store
+ .update(&mut cx, |lsp_store, cx| {
+ let request_id =
+ Some(request_id.fetch_add(1, atomic::Ordering::AcqRel));
+ cx.emit(LspStoreEvent::RefreshSemanticTokens {
+ server_id,
+ request_id,
+ });
+ lsp_store
+ .downstream_client
+ .as_ref()
+ .map(|(client, project_id)| {
+ client.send(proto::RefreshSemanticTokens {
+ project_id: *project_id,
+ server_id: server_id.to_proto(),
+ request_id: request_id.map(|id| id as u64),
+ })
+ })
+ })?
+ .transpose()?;
+ Ok(())
+ }
+ }
+ })
+ .detach();
+
language_server
.on_request::<lsp::request::WorkspaceDiagnosticRefresh, _, _>({
let this = lsp_store.clone();
@@ -3785,8 +3841,12 @@ pub struct LspStore {
diagnostic_summaries:
HashMap<WorktreeId, HashMap<Arc<RelPath>, HashMap<LanguageServerId, DiagnosticSummary>>>,
pub lsp_server_capabilities: HashMap<LanguageServerId, lsp::ServerCapabilities>,
+ semantic_token_stylizers:
+ HashMap<(LanguageServerId, Option<LanguageName>), SemanticTokenStylizer>,
+ semantic_token_rules: SemanticTokenRules,
lsp_data: HashMap<BufferId, BufferLspData>,
next_hint_id: Arc<AtomicUsize>,
+ global_semantic_tokens_mode: settings::SemanticTokens,
}
#[derive(Debug)]
@@ -3794,6 +3854,7 @@ pub struct BufferLspData {
buffer_version: Global,
document_colors: Option<DocumentColorData>,
code_lens: Option<CodeLensData>,
+ semantic_tokens: Option<SemanticTokensData>,
inlay_hints: BufferInlayHints,
lsp_requests: HashMap<LspKey, HashMap<LspRequestId, Task<()>>>,
chunk_lsp_requests: HashMap<LspKey, HashMap<RowChunk, LspRequestId>>,
@@ -3811,6 +3872,7 @@ impl BufferLspData {
buffer_version: buffer.read(cx).version(),
document_colors: None,
code_lens: None,
+ semantic_tokens: None,
inlay_hints: BufferInlayHints::new(buffer, cx),
lsp_requests: HashMap::default(),
chunk_lsp_requests: HashMap::default(),
@@ -3828,6 +3890,13 @@ impl BufferLspData {
}
self.inlay_hints.remove_server_data(for_server);
+
+ if let Some(semantic_tokens) = &mut self.semantic_tokens {
+ semantic_tokens.raw_tokens.servers.remove(&for_server);
+ semantic_tokens
+ .latest_invalidation_requests
+ .remove(&for_server);
+ }
}
#[cfg(any(test, feature = "test-support"))]
@@ -3878,6 +3947,10 @@ pub enum LspStoreEvent {
server_id: LanguageServerId,
request_id: Option<usize>,
},
+ RefreshSemanticTokens {
+ server_id: LanguageServerId,
+ request_id: Option<usize>,
+ },
RefreshCodeLens,
DiagnosticsUpdated {
server_id: LanguageServerId,
@@ -3960,6 +4033,7 @@ impl LspStore {
client.add_entity_request_handler(Self::handle_get_color_presentation);
client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
client.add_entity_request_handler(Self::handle_refresh_inlay_hints);
+ client.add_entity_request_handler(Self::handle_refresh_semantic_tokens);
client.add_entity_request_handler(Self::handle_refresh_code_lens);
client.add_entity_request_handler(Self::handle_on_type_formatting);
client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
@@ -4056,6 +4130,8 @@ impl LspStore {
(Self::maintain_workspace_config(receiver, cx), sender)
};
+ let global_semantic_tokens_mode = all_language_settings(None, cx).defaults.semantic_tokens;
+
Self {
mode: LspStoreMode::Local(LocalLspStore {
weak: cx.weak_entity(),
@@ -4109,10 +4185,16 @@ impl LspStore {
nonce: StdRng::from_os_rng().random(),
diagnostic_summaries: HashMap::default(),
lsp_server_capabilities: HashMap::default(),
+ semantic_token_stylizers: HashMap::default(),
+ semantic_token_rules: crate::project_settings::ProjectSettings::get_global(cx)
+ .global_lsp_settings
+ .semantic_token_rules
+ .clone(),
lsp_data: HashMap::default(),
next_hint_id: Arc::default(),
active_entry: None,
_maintain_workspace_config,
+ global_semantic_tokens_mode,
_maintain_buffer_languages: Self::maintain_buffer_languages(languages, cx),
}
}
@@ -4155,6 +4237,7 @@ impl LspStore {
let (sender, receiver) = watch::channel();
(Self::maintain_workspace_config(receiver, cx), sender)
};
+ let global_semantic_tokens_mode = all_language_settings(None, cx).defaults.semantic_tokens;
Self {
mode: LspStoreMode::Remote(RemoteLspStore {
upstream_client: Some(upstream_client),
@@ -4164,11 +4247,17 @@ impl LspStore {
last_formatting_failure: None,
buffer_store,
worktree_store,
+ global_semantic_tokens_mode,
languages: languages.clone(),
language_server_statuses: Default::default(),
nonce: StdRng::from_os_rng().random(),
diagnostic_summaries: HashMap::default(),
lsp_server_capabilities: HashMap::default(),
+ semantic_token_stylizers: HashMap::default(),
+ semantic_token_rules: crate::project_settings::ProjectSettings::get_global(cx)
+ .global_lsp_settings
+ .semantic_token_rules
+ .clone(),
next_hint_id: Arc::default(),
lsp_data: HashMap::default(),
active_entry: None,
@@ -4440,7 +4529,7 @@ impl LspStore {
},
result_id: None,
registration_id: None,
- server_id: server_id,
+ server_id,
disk_based_sources: Cow::Borrowed(&[]),
})
.collect::<Vec<_>>();
@@ -4970,6 +5059,22 @@ impl LspStore {
})
}
+ let new_semantic_token_rules = crate::project_settings::ProjectSettings::get_global(cx)
+ .global_lsp_settings
+ .semantic_token_rules
+ .clone();
+ if new_semantic_token_rules != self.semantic_token_rules {
+ self.semantic_token_rules = new_semantic_token_rules;
+ self.semantic_token_stylizers.clear();
+ }
+
+ let new_global_semantic_tokens_mode =
+ all_language_settings(None, cx).defaults.semantic_tokens;
+ if new_global_semantic_tokens_mode != self.global_semantic_tokens_mode {
+ self.global_semantic_tokens_mode = new_global_semantic_tokens_mode;
+ self.restart_all_language_servers(cx);
+ }
+
cx.notify();
}
@@ -7438,7 +7543,7 @@ impl LspStore {
diagnostics,
version: None,
},
- result_id,
+ result_id: result_id.map(SharedString::new),
disk_based_sources,
registration_id: new_registration_id,
});
@@ -8897,6 +9002,33 @@ impl LspStore {
})
}
+ fn local_lsp_servers_for_buffer(
+ &self,
+ buffer: &Entity<Buffer>,
+ cx: &mut Context<Self>,
+ ) -> Vec<LanguageServerId> {
+ let Some(local) = self.as_local() else {
+ return Vec::new();
+ };
+
+ let snapshot = buffer.read(cx).snapshot();
+
+ buffer.update(cx, |buffer, cx| {
+ local
+ .language_servers_for_buffer(buffer, cx)
+ .map(|(_, server)| server.server_id())
+ .filter(|server_id| {
+ self.as_local().is_none_or(|local| {
+ local
+ .buffers_opened_in_servers
+ .get(&snapshot.remote_id())
+ .is_some_and(|servers| servers.contains(server_id))
+ })
+ })
+ .collect()
+ })
+ }
+
fn request_multiple_lsp_locally<P, R>(
&mut self,
buffer: &Entity<Buffer>,
@@ -9197,47 +9329,6 @@ impl LspStore {
)
.await?;
}
- Request::GetDocumentDiagnostics(get_document_diagnostics) => {
- let buffer_id = BufferId::new(get_document_diagnostics.buffer_id())?;
- let version = deserialize_version(get_document_diagnostics.buffer_version());
- let buffer = lsp_store.update(&mut cx, |this, cx| {
- this.buffer_store.read(cx).get_existing(buffer_id)
- })?;
- buffer
- .update(&mut cx, |buffer, _| {
- buffer.wait_for_version(version.clone())
- })
- .await?;
- lsp_store.update(&mut cx, |lsp_store, cx| {
- let lsp_data = lsp_store.latest_lsp_data(&buffer, cx);
- let key = LspKey {
- request_type: TypeId::of::<GetDocumentDiagnostics>(),
- server_queried: server_id,
- };
- if <GetDocumentDiagnostics as LspCommand>::ProtoRequest::stop_previous_requests(
- ) {
- if let Some(lsp_requests) = lsp_data.lsp_requests.get_mut(&key) {
- lsp_requests.clear();
- };
- }
-
- let existing_queries = lsp_data.lsp_requests.entry(key).or_default();
- existing_queries.insert(
- lsp_request_id,
- cx.spawn(async move |lsp_store, cx| {
- let diagnostics_pull = lsp_store.update(cx, |lsp_store, cx| {
- lsp_store.pull_diagnostics_for_buffer(buffer, cx)
- });
- if let Ok(diagnostics_pull) = diagnostics_pull {
- match diagnostics_pull.await {
- Ok(()) => {}
- Err(e) => log::error!("Failed to pull diagnostics: {e:#}"),
- };
- }
- }),
- );
- });
- }
Request::InlayHints(inlay_hints) => {
let query_start = inlay_hints
.start
@@ -9271,6 +9362,118 @@ impl LspStore {
.await
.context("querying for inlay hints")?
}
+ //////////////////////////////
+ // Below are LSP queries that need to fetch more data,
+ // hence cannot just proxy the request to language server with `query_lsp_locally`.
+ Request::GetDocumentDiagnostics(get_document_diagnostics) => {
+ let (_, buffer) = Self::wait_for_buffer_version::<GetDocumentDiagnostics>(
+ &lsp_store,
+ &get_document_diagnostics,
+ &mut cx,
+ )
+ .await?;
+ lsp_store.update(&mut cx, |lsp_store, cx| {
+ let lsp_data = lsp_store.latest_lsp_data(&buffer, cx);
+ let key = LspKey {
+ request_type: TypeId::of::<GetDocumentDiagnostics>(),
+ server_queried: server_id,
+ };
+ if <GetDocumentDiagnostics as LspCommand>::ProtoRequest::stop_previous_requests(
+ ) {
+ if let Some(lsp_requests) = lsp_data.lsp_requests.get_mut(&key) {
+ lsp_requests.clear();
+ };
+ }
+
+ lsp_data.lsp_requests.entry(key).or_default().insert(
+ lsp_request_id,
+ cx.spawn(async move |lsp_store, cx| {
+ let diagnostics_pull = lsp_store
+ .update(cx, |lsp_store, cx| {
+ lsp_store.pull_diagnostics_for_buffer(buffer, cx)
+ })
+ .ok();
+ if let Some(diagnostics_pull) = diagnostics_pull {
+ match diagnostics_pull.await {
+ Ok(()) => {}
+ Err(e) => log::error!("Failed to pull diagnostics: {e:#}"),
+ };
+ }
+ }),
+ );
+ });
+ }
+ Request::SemanticTokens(semantic_tokens) => {
+ let (buffer_version, buffer) = Self::wait_for_buffer_version::<SemanticTokensFull>(
+ &lsp_store,
+ &semantic_tokens,
+ &mut cx,
+ )
+ .await?;
+ let for_server = semantic_tokens.for_server.map(LanguageServerId::from_proto);
+ lsp_store.update(&mut cx, |lsp_store, cx| {
+ if let Some((client, project_id)) = lsp_store.downstream_client.clone() {
+ let lsp_data = lsp_store.latest_lsp_data(&buffer, cx);
+ let key = LspKey {
+ request_type: TypeId::of::<SemanticTokensFull>(),
+ server_queried: server_id,
+ };
+ if <SemanticTokensFull as LspCommand>::ProtoRequest::stop_previous_requests() {
+ if let Some(lsp_requests) = lsp_data.lsp_requests.get_mut(&key) {
+ lsp_requests.clear();
+ };
+ }
+
+ lsp_data.lsp_requests.entry(key).or_default().insert(
+ lsp_request_id,
+ cx.spawn(async move |lsp_store, cx| {
+ let tokens_fetch = lsp_store
+ .update(cx, |lsp_store, cx| {
+ lsp_store
+ .fetch_semantic_tokens_for_buffer(&buffer, for_server, cx)
+ })
+ .ok();
+ if let Some(tokens_fetch) = tokens_fetch {
+ let new_tokens = tokens_fetch.await;
+ if let Some(new_tokens) = new_tokens {
+ lsp_store
+ .update(cx, |lsp_store, cx| {
+ let response = new_tokens
+ .into_iter()
+ .map(|(server_id, response)| {
+ (
+ server_id.to_proto(),
+ SemanticTokensFull::response_to_proto(
+ response,
+ lsp_store,
+ sender_id,
+ &buffer_version,
+ cx,
+ ),
+ )
+ })
+ .collect::<HashMap<_, _>>();
+ match client.send_lsp_response::<<SemanticTokensFull as LspCommand>::ProtoRequest>(
+ project_id,
+ lsp_request_id,
+ response,
+ ) {
+ Ok(()) => {}
+ Err(e) => {
+ log::error!(
+ "Failed to send semantic tokens LSP response: {e:#}",
+ )
+ }
+ }
+ })
+ .ok();
+ }
+ }
+ }),
+ );
+ }
+ });
+ }
}
Ok(proto::Ack {})
}
@@ -11144,6 +11347,11 @@ impl LspStore {
}
}
+ pub fn restart_all_language_servers(&mut self, cx: &mut Context<Self>) {
+ let buffers = self.buffer_store.read(cx).buffers().collect();
+ self.restart_language_servers_for_buffers(buffers, HashSet::default(), cx);
+ }
+
pub fn restart_language_servers_for_buffers(
&mut self,
buffers: Vec<Entity<Buffer>>,
@@ -12239,6 +12447,8 @@ impl LspStore {
fn cleanup_lsp_data(&mut self, for_server: LanguageServerId) {
self.lsp_server_capabilities.remove(&for_server);
+ self.semantic_token_stylizers
+ .retain(|&(id, _), _| id != for_server);
for lsp_data in self.lsp_data.values_mut() {
lsp_data.remove_server_data(for_server);
}
@@ -12456,7 +12666,7 @@ impl LspStore {
diagnostics,
version,
},
- result_id,
+ result_id: result_id.map(SharedString::new),
disk_based_sources,
registration_id: new_registration_id,
});
@@ -13168,15 +13378,8 @@ impl LspStore {
<T::ProtoRequest as proto::RequestMessage>::Response:
Into<<T::ProtoRequest as proto::LspRequestMessage>::Response>,
{
- let buffer_id = BufferId::new(proto_request.buffer_id())?;
- let version = deserialize_version(proto_request.buffer_version());
- let buffer = lsp_store.update(cx, |this, cx| {
- this.buffer_store.read(cx).get_existing(buffer_id)
- })?;
- buffer
- .update(cx, |buffer, _| buffer.wait_for_version(version.clone()))
- .await?;
- let buffer_version = buffer.read_with(cx, |buffer, _| buffer.version());
+ let (buffer_version, buffer) =
+ Self::wait_for_buffer_version::<T>(&lsp_store, &proto_request, cx).await?;
let request =
T::from_proto(proto_request, lsp_store.clone(), buffer.clone(), cx.clone()).await?;
let key = LspKey {
@@ -13256,6 +13459,27 @@ impl LspStore {
Ok(())
}
+ async fn wait_for_buffer_version<T>(
+ lsp_store: &Entity<Self>,
+ proto_request: &T::ProtoRequest,
+ cx: &mut AsyncApp,
+ ) -> Result<(Global, Entity<Buffer>)>
+ where
+ T: LspCommand,
+ T::ProtoRequest: proto::LspRequestMessage,
+ {
+ let buffer_id = BufferId::new(proto_request.buffer_id())?;
+ let version = deserialize_version(proto_request.buffer_version());
+ let buffer = lsp_store.update(cx, |this, cx| {
+ this.buffer_store.read(cx).get_existing(buffer_id)
+ })?;
+ buffer
+ .update(cx, |buffer, _| buffer.wait_for_version(version.clone()))
+ .await?;
+ let buffer_version = buffer.read_with(cx, |buffer, _| buffer.version());
+ Ok((buffer_version, buffer))
+ }
+
fn take_text_document_sync_options(
capabilities: &mut lsp::ServerCapabilities,
) -> lsp::TextDocumentSyncOptions {
@@ -13306,7 +13530,11 @@ impl LspStore {
.entry(buffer_id)
.or_insert_with(|| BufferLspData::new(buffer, cx));
if buffer_version.changed_since(&lsp_data.buffer_version) {
+ // To send delta requests for semantic tokens, the previous tokens
+ // need to be kept between buffer changes.
+ let semantic_tokens = lsp_data.semantic_tokens.take();
*lsp_data = BufferLspData::new(buffer, cx);
+ lsp_data.semantic_tokens = semantic_tokens;
}
lsp_data
}
@@ -13497,14 +13725,18 @@ fn lsp_workspace_diagnostics_refresh(
})
}
-fn buffer_diagnostic_identifier(options: &DiagnosticServerCapabilities) -> Option<String> {
+fn buffer_diagnostic_identifier(options: &DiagnosticServerCapabilities) -> Option<SharedString> {
match &options {
- lsp::DiagnosticServerCapabilities::Options(diagnostic_options) => {
- diagnostic_options.identifier.clone()
- }
+ lsp::DiagnosticServerCapabilities::Options(diagnostic_options) => diagnostic_options
+ .identifier
+ .as_deref()
+ .map(SharedString::new),
lsp::DiagnosticServerCapabilities::RegistrationOptions(registration_options) => {
let diagnostic_options = ®istration_options.diagnostic_options;
- diagnostic_options.identifier.clone()
+ diagnostic_options
+ .identifier
+ .as_deref()
+ .map(SharedString::new)
}
}
}
@@ -0,0 +1,931 @@
+use std::{collections::hash_map, ops::Range, slice::ChunksExact, sync::Arc};
+
+use anyhow::Result;
+
+use clock::Global;
+use collections::HashMap;
+use futures::{
+ FutureExt as _,
+ future::{Shared, join_all},
+};
+use gpui::{App, AppContext, AsyncApp, Context, Entity, ReadGlobal as _, SharedString, Task};
+use itertools::Itertools;
+use language::{Buffer, LanguageName};
+use lsp::{AdapterServerCapabilities, LSP_REQUEST_TIMEOUT, LanguageServerId};
+use rpc::{TypedEnvelope, proto};
+use settings::{SemanticTokenRule, SemanticTokenRules, Settings as _, SettingsStore};
+use smol::future::yield_now;
+use text::{Anchor, Bias, OffsetUtf16, PointUtf16, Unclipped};
+use util::ResultExt as _;
+
+use crate::{
+ LanguageServerToQuery, LspStore, LspStoreEvent,
+ lsp_command::{
+ LspCommand, SemanticTokensDelta, SemanticTokensEdit, SemanticTokensFull,
+ SemanticTokensResponse,
+ },
+ project_settings::ProjectSettings,
+};
+
+#[derive(Debug, Clone, Copy)]
+pub struct RefreshForServer {
+ pub server_id: LanguageServerId,
+ pub request_id: Option<usize>,
+}
+
+impl LspStore {
+ pub fn semantic_tokens(
+ &mut self,
+ buffer: Entity<Buffer>,
+ refresh: Option<RefreshForServer>,
+ cx: &mut Context<Self>,
+ ) -> SemanticTokensTask {
+ let version_queried_for = buffer.read(cx).version();
+ let latest_lsp_data = self.latest_lsp_data(&buffer, cx);
+ let semantic_tokens_data = latest_lsp_data.semantic_tokens.get_or_insert_default();
+ if let Some(refresh) = refresh {
+ let mut invalidate_cache = true;
+ match semantic_tokens_data
+ .latest_invalidation_requests
+ .entry(refresh.server_id)
+ {
+ hash_map::Entry::Occupied(mut o) => {
+ if refresh.request_id > *o.get() {
+ o.insert(refresh.request_id);
+ } else {
+ invalidate_cache = false;
+ }
+ }
+ hash_map::Entry::Vacant(v) => {
+ v.insert(refresh.request_id);
+ }
+ }
+
+ if invalidate_cache {
+ let SemanticTokensData {
+ raw_tokens,
+ latest_invalidation_requests: _,
+ update,
+ } = semantic_tokens_data;
+ *update = None;
+ raw_tokens.servers.clear();
+ }
+ }
+
+ if let Some((updating_for, task)) = &semantic_tokens_data.update
+ && !version_queried_for.changed_since(updating_for)
+ {
+ return task.clone();
+ }
+
+ let new_tokens = self.fetch_semantic_tokens_for_buffer(
+ &buffer,
+ refresh.map(|refresh| refresh.server_id),
+ cx,
+ );
+
+ let task_buffer = buffer.clone();
+ let task_version_queried_for = version_queried_for.clone();
+ let task = cx
+ .spawn(async move |lsp_store, cx| {
+ let buffer = task_buffer;
+ let version_queried_for = task_version_queried_for;
+ let res = if let Some(new_tokens) = new_tokens.await {
+ let (raw_tokens, buffer_snapshot) = lsp_store
+ .update(cx, |lsp_store, cx| {
+ let lsp_data = lsp_store.latest_lsp_data(&buffer, cx);
+ let semantic_tokens_data =
+ lsp_data.semantic_tokens.get_or_insert_default();
+
+ if version_queried_for == lsp_data.buffer_version {
+ for (server_id, new_tokens_response) in new_tokens {
+ match new_tokens_response {
+ SemanticTokensResponse::Full { data, result_id } => {
+ semantic_tokens_data.raw_tokens.servers.insert(
+ server_id,
+ Arc::new(ServerSemanticTokens::from_full(
+ data, result_id,
+ )),
+ );
+ }
+ SemanticTokensResponse::Delta { edits, result_id } => {
+ if let Some(tokens) = semantic_tokens_data
+ .raw_tokens
+ .servers
+ .get_mut(&server_id)
+ {
+ let tokens = Arc::make_mut(tokens);
+ tokens.result_id = result_id;
+ tokens.apply(&edits);
+ }
+ }
+ }
+ }
+ }
+ let buffer_snapshot =
+ buffer.read_with(cx, |buffer, _| buffer.snapshot());
+ (semantic_tokens_data.raw_tokens.clone(), buffer_snapshot)
+ })
+ .map_err(Arc::new)?;
+ Some(raw_to_buffer_semantic_tokens(raw_tokens, &buffer_snapshot).await)
+ } else {
+ lsp_store.update(cx, |lsp_store, cx| {
+ if let Some(current_lsp_data) =
+ lsp_store.current_lsp_data(buffer.read(cx).remote_id())
+ {
+ if current_lsp_data.buffer_version == version_queried_for {
+ current_lsp_data.semantic_tokens = None;
+ }
+ }
+ })?;
+ None
+ };
+ Ok(BufferSemanticTokens { tokens: res })
+ })
+ .shared();
+
+ self.latest_lsp_data(&buffer, cx)
+ .semantic_tokens
+ .get_or_insert_default()
+ .update = Some((version_queried_for, task.clone()));
+
+ task
+ }
+
+ pub(super) fn fetch_semantic_tokens_for_buffer(
+ &mut self,
+ buffer: &Entity<Buffer>,
+ for_server: Option<LanguageServerId>,
+ cx: &mut Context<Self>,
+ ) -> Task<Option<HashMap<LanguageServerId, SemanticTokensResponse>>> {
+ if let Some((client, upstream_project_id)) = self.upstream_client() {
+ let request = SemanticTokensFull { for_server };
+ if !self.is_capable_for_proto_request(buffer, &request, cx) {
+ return Task::ready(None);
+ }
+
+ let request_task = client.request_lsp(
+ upstream_project_id,
+ None,
+ LSP_REQUEST_TIMEOUT,
+ cx.background_executor().clone(),
+ request.to_proto(upstream_project_id, buffer.read(cx)),
+ );
+ let buffer = buffer.clone();
+ cx.spawn(async move |weak_lsp_store, cx| {
+ let lsp_store = weak_lsp_store.upgrade()?;
+ let tokens = join_all(
+ request_task
+ .await
+ .log_err()
+ .flatten()
+ .map(|response| response.payload)
+ .unwrap_or_default()
+ .into_iter()
+ .map(|response| {
+ let server_id = LanguageServerId::from_proto(response.server_id);
+ let response = request.response_from_proto(
+ response.response,
+ lsp_store.clone(),
+ buffer.clone(),
+ cx.clone(),
+ );
+ async move {
+ match response.await {
+ Ok(tokens) => Some((server_id, tokens)),
+ Err(e) => {
+ log::error!("Failed to query remote semantic tokens for server {server_id:?}: {e:#}");
+ None
+ }
+ }
+ }
+ }),
+ )
+ .await
+ .into_iter()
+ .flatten()
+ .collect();
+ Some(tokens)
+ })
+ } else {
+ let token_tasks = self
+ .local_lsp_servers_for_buffer(&buffer, cx)
+ .into_iter()
+ .filter(|&server_id| {
+ for_server.is_none_or(|for_server_id| for_server_id == server_id)
+ })
+ .filter_map(|server_id| {
+ let capabilities = AdapterServerCapabilities {
+ server_capabilities: self.lsp_server_capabilities.get(&server_id)?.clone(),
+ code_action_kinds: None,
+ };
+ let request_task = match self.semantic_tokens_result_id(server_id, buffer, cx) {
+ Some(result_id) => {
+ let delta_request = SemanticTokensDelta {
+ previous_result_id: result_id,
+ };
+ if !delta_request.check_capabilities(capabilities.clone()) {
+ let full_request = SemanticTokensFull {
+ for_server: Some(server_id),
+ };
+ if !full_request.check_capabilities(capabilities) {
+ return None;
+ }
+
+ self.request_lsp(
+ buffer.clone(),
+ LanguageServerToQuery::Other(server_id),
+ full_request,
+ cx,
+ )
+ } else {
+ self.request_lsp(
+ buffer.clone(),
+ LanguageServerToQuery::Other(server_id),
+ delta_request,
+ cx,
+ )
+ }
+ }
+ None => {
+ let request = SemanticTokensFull {
+ for_server: Some(server_id),
+ };
+ if !request.check_capabilities(capabilities) {
+ return None;
+ }
+ self.request_lsp(
+ buffer.clone(),
+ LanguageServerToQuery::Other(server_id),
+ request,
+ cx,
+ )
+ }
+ };
+ Some(async move { (server_id, request_task.await) })
+ })
+ .collect::<Vec<_>>();
+ if token_tasks.is_empty() {
+ return Task::ready(None);
+ }
+
+ cx.background_spawn(async move {
+ Some(
+ join_all(token_tasks)
+ .await
+ .into_iter()
+ .filter_map(|(server_id, response)| {
+ match response {
+ Ok(tokens) => Some((server_id, tokens)),
+ Err(e) => {
+ log::error!("Failed to query local semantic tokens for server {server_id:?}: {e:#}");
+ None
+ }
+ }
+ })
+ .collect()
+ )
+ })
+ }
+ }
+
+ pub(crate) async fn handle_refresh_semantic_tokens(
+ lsp_store: Entity<Self>,
+ envelope: TypedEnvelope<proto::RefreshSemanticTokens>,
+ mut cx: AsyncApp,
+ ) -> Result<proto::Ack> {
+ lsp_store.update(&mut cx, |_, cx| {
+ cx.emit(LspStoreEvent::RefreshSemanticTokens {
+ server_id: LanguageServerId::from_proto(envelope.payload.server_id),
+ request_id: envelope.payload.request_id.map(|id| id as usize),
+ });
+ })?;
+ Ok(proto::Ack {})
+ }
+
+ fn semantic_tokens_result_id(
+ &mut self,
+ server_id: LanguageServerId,
+ buffer: &Entity<Buffer>,
+ cx: &mut App,
+ ) -> Option<SharedString> {
+ self.latest_lsp_data(buffer, cx)
+ .semantic_tokens
+ .as_ref()?
+ .raw_tokens
+ .servers
+ .get(&server_id)?
+ .result_id
+ .clone()
+ }
+
+ pub fn get_or_create_token_stylizer(
+ &mut self,
+ server_id: LanguageServerId,
+ language: Option<&LanguageName>,
+ cx: &mut App,
+ ) -> Option<&SemanticTokenStylizer> {
+ let stylizer = match self
+ .semantic_token_stylizers
+ .entry((server_id, language.cloned()))
+ {
+ hash_map::Entry::Occupied(o) => o.into_mut(),
+ hash_map::Entry::Vacant(v) => {
+ let tokens_provider = self
+ .lsp_server_capabilities
+ .get(&server_id)?
+ .semantic_tokens_provider
+ .as_ref()?;
+ let legend = match tokens_provider {
+ lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(opts) => {
+ &opts.legend
+ }
+ lsp::SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
+ opts,
+ ) => &opts.semantic_tokens_options.legend,
+ };
+ let language_rules = language.and_then(|language| {
+ SettingsStore::global(cx).language_semantic_token_rules(language.as_ref())
+ });
+ let stylizer = SemanticTokenStylizer::new(server_id, legend, language_rules, cx);
+ v.insert(stylizer)
+ }
+ };
+ Some(stylizer)
+ }
+}
+
+pub type SemanticTokensTask =
+ Shared<Task<std::result::Result<BufferSemanticTokens, Arc<anyhow::Error>>>>;
+
+#[derive(Debug, Default, Clone)]
+pub struct BufferSemanticTokens {
+ pub tokens: Option<HashMap<LanguageServerId, Arc<[BufferSemanticToken]>>>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenType(pub u32);
+
+#[derive(Debug, Clone)]
+pub struct BufferSemanticToken {
+ pub range: Range<Anchor>,
+ pub token_type: TokenType,
+ pub token_modifiers: u32,
+}
+
+pub struct SemanticTokenStylizer {
+ server_id: LanguageServerId,
+ rules_by_token_type: HashMap<TokenType, Vec<SemanticTokenRule>>,
+ token_type_names: HashMap<TokenType, SharedString>,
+ modifier_mask: HashMap<SharedString, u32>,
+}
+
+impl SemanticTokenStylizer {
+ pub fn new(
+ server_id: LanguageServerId,
+ legend: &lsp::SemanticTokensLegend,
+ language_rules: Option<&SemanticTokenRules>,
+ cx: &App,
+ ) -> Self {
+ let token_types: HashMap<TokenType, SharedString> = legend
+ .token_types
+ .iter()
+ .enumerate()
+ .map(|(i, token_type)| {
+ (
+ TokenType(i as u32),
+ SharedString::from(token_type.as_str().to_string()),
+ )
+ })
+ .collect();
+ let modifier_mask: HashMap<SharedString, u32> = legend
+ .token_modifiers
+ .iter()
+ .enumerate()
+ .map(|(i, modifier)| (SharedString::from(modifier.as_str().to_string()), 1 << i))
+ .collect();
+
+ let global_rules = &ProjectSettings::get_global(cx)
+ .global_lsp_settings
+ .semantic_token_rules;
+
+ let rules_by_token_type = token_types
+ .iter()
+ .map(|(index, token_type_name)| {
+ let filter = |rule: &&SemanticTokenRule| {
+ rule.token_type
+ .as_ref()
+ .is_none_or(|rule_token_type| rule_token_type == token_type_name.as_ref())
+ };
+ let matching_rules: Vec<SemanticTokenRule> = global_rules
+ .rules
+ .iter()
+ .chain(language_rules.into_iter().flat_map(|lr| &lr.rules))
+ .rev()
+ .filter(filter)
+ .cloned()
+ .collect();
+ (*index, matching_rules)
+ })
+ .collect();
+
+ SemanticTokenStylizer {
+ server_id,
+ rules_by_token_type,
+ token_type_names: token_types,
+ modifier_mask,
+ }
+ }
+
+ pub fn server_id(&self) -> LanguageServerId {
+ self.server_id
+ }
+
+ pub fn token_type_name(&self, token_type: TokenType) -> Option<&SharedString> {
+ self.token_type_names.get(&token_type)
+ }
+
+ pub fn has_modifier(&self, token_modifiers: u32, modifier: &str) -> bool {
+ let Some(mask) = self.modifier_mask.get(modifier) else {
+ return false;
+ };
+ (token_modifiers & mask) != 0
+ }
+
+ pub fn token_modifiers(&self, token_modifiers: u32) -> Option<String> {
+ let modifiers: Vec<&str> = self
+ .modifier_mask
+ .iter()
+ .filter(|(_, mask)| (token_modifiers & *mask) != 0)
+ .map(|(name, _)| name.as_ref())
+ .collect();
+ if modifiers.is_empty() {
+ None
+ } else {
+ Some(modifiers.join(", "))
+ }
+ }
+
+ pub fn rules_for_token(&self, token_type: TokenType) -> Option<&[SemanticTokenRule]> {
+ self.rules_by_token_type
+ .get(&token_type)
+ .map(|v| v.as_slice())
+ }
+}
+
+async fn raw_to_buffer_semantic_tokens(
+ raw_tokens: RawSemanticTokens,
+ buffer_snapshot: &text::BufferSnapshot,
+) -> HashMap<LanguageServerId, Arc<[BufferSemanticToken]>> {
+ let mut res = HashMap::default();
+ for (&server_id, server_tokens) in &raw_tokens.servers {
+ // We don't `collect` here because the filter_map iterator does not pre-allocate;
+ // we'd rather over-allocate here than not, since we have to re-allocate into an arc slice anyway
+ let mut buffer_tokens = Vec::with_capacity(server_tokens.data.len() / 5);
+ // 5000 was chosen by profiling, on a decent machine this will take about 1ms per chunk
+ // This is to avoid blocking the main thread for hundreds of milliseconds at a time for very big files
+ // If we ever change the below code to not query the underlying rope 6 times per token we can bump this up
+ for chunk in server_tokens.tokens().chunks(5000).into_iter() {
+ buffer_tokens.extend(chunk.filter_map(|token| {
+ let start = Unclipped(PointUtf16::new(token.line, token.start));
+ let clipped_start = buffer_snapshot.clip_point_utf16(start, Bias::Left);
+ let start_offset = buffer_snapshot
+ .as_rope()
+ .point_utf16_to_offset_utf16(clipped_start);
+ let end_offset = start_offset + OffsetUtf16(token.length as usize);
+
+ let start = buffer_snapshot
+ .as_rope()
+ .offset_utf16_to_offset(start_offset);
+ let end = buffer_snapshot.as_rope().offset_utf16_to_offset(end_offset);
+
+ if start == end {
+ return None;
+ }
+
+ Some(BufferSemanticToken {
+ range: buffer_snapshot.anchor_before(start)..buffer_snapshot.anchor_after(end),
+ token_type: token.token_type,
+ token_modifiers: token.token_modifiers,
+ })
+ }));
+ yield_now().await;
+ }
+
+ res.insert(server_id, buffer_tokens.into());
+ yield_now().await;
+ }
+ res
+}
+
+#[derive(Default, Debug)]
+pub struct SemanticTokensData {
+ pub(super) raw_tokens: RawSemanticTokens,
+ pub(super) latest_invalidation_requests: HashMap<LanguageServerId, Option<usize>>,
+ update: Option<(Global, SemanticTokensTask)>,
+}
+
+/// All the semantic tokens for a buffer.
+///
+/// This aggregates semantic tokens from multiple language servers in a specific order.
+/// Semantic tokens later in the list will override earlier ones in case of overlap.
+#[derive(Default, Debug, Clone)]
+pub(super) struct RawSemanticTokens {
+ pub servers: HashMap<lsp::LanguageServerId, Arc<ServerSemanticTokens>>,
+}
+
+/// All the semantic tokens for a buffer, from a single language server.
+#[derive(Debug, Clone)]
+pub struct ServerSemanticTokens {
+ /// Each value is:
+ /// data[5*i] - deltaLine: token line number, relative to the start of the previous token
+ /// data[5*i+1] - deltaStart: token start character, relative to the start of the previous token (relative to 0 or the previous token's start if they are on the same line)
+ /// data[5*i+2] - length: the length of the token.
+ /// data[5*i+3] - tokenType: will be looked up in SemanticTokensLegend.tokenTypes. We currently ask that tokenType < 65536.
+ /// data[5*i+4] - tokenModifiers: each set bit will be looked up in SemanticTokensLegend.tokenModifiers
+ ///
+ /// See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/ for more.
+ data: Vec<u32>,
+
+ pub(crate) result_id: Option<SharedString>,
+}
+
+pub struct SemanticTokensIter<'a> {
+ prev: Option<(u32, u32)>,
+ data: ChunksExact<'a, u32>,
+}
+
+// A single item from `data`.
+struct SemanticTokenValue {
+ delta_line: u32,
+ delta_start: u32,
+ length: u32,
+ token_type: TokenType,
+ token_modifiers: u32,
+}
+
+/// A semantic token, independent of its position.
+#[derive(Debug, PartialEq, Eq)]
+pub struct SemanticToken {
+ pub line: u32,
+ pub start: u32,
+ pub length: u32,
+ pub token_type: TokenType,
+ pub token_modifiers: u32,
+}
+
+impl ServerSemanticTokens {
+ pub fn from_full(data: Vec<u32>, result_id: Option<SharedString>) -> Self {
+ ServerSemanticTokens { data, result_id }
+ }
+
+ pub(crate) fn apply(&mut self, edits: &[SemanticTokensEdit]) {
+ for edit in edits {
+ let start = edit.start as usize;
+ let end = start + edit.delete_count as usize;
+ self.data.splice(start..end, edit.data.iter().copied());
+ }
+ }
+
+ pub fn tokens(&self) -> SemanticTokensIter<'_> {
+ SemanticTokensIter {
+ prev: None,
+ data: self.data.chunks_exact(5),
+ }
+ }
+}
+
+impl Iterator for SemanticTokensIter<'_> {
+ type Item = SemanticToken;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let chunk = self.data.next()?;
+ let token = SemanticTokenValue {
+ delta_line: chunk[0],
+ delta_start: chunk[1],
+ length: chunk[2],
+ token_type: TokenType(chunk[3]),
+ token_modifiers: chunk[4],
+ };
+
+ let (line, start) = if let Some((last_line, last_start)) = self.prev {
+ let line = last_line + token.delta_line;
+ let start = if token.delta_line == 0 {
+ last_start + token.delta_start
+ } else {
+ token.delta_start
+ };
+ (line, start)
+ } else {
+ (token.delta_line, token.delta_start)
+ };
+
+ self.prev = Some((line, start));
+
+ Some(SemanticToken {
+ line,
+ start,
+ length: token.length,
+ token_type: token.token_type,
+ token_modifiers: token.token_modifiers,
+ })
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::lsp_command::SemanticTokensEdit;
+ use lsp::SEMANTIC_TOKEN_MODIFIERS;
+
+ fn modifier_names(bits: u32) -> String {
+ if bits == 0 {
+ return "-".to_string();
+ }
+ let names: Vec<&str> = SEMANTIC_TOKEN_MODIFIERS
+ .iter()
+ .enumerate()
+ .filter(|(i, _)| bits & (1 << i) != 0)
+ .map(|(_, m)| m.as_str())
+ .collect();
+
+ // Check for unknown bits
+ let known_bits = (1u32 << SEMANTIC_TOKEN_MODIFIERS.len()) - 1;
+ let unknown = bits & !known_bits;
+
+ if unknown != 0 {
+ let mut result = names.join("+");
+ if !result.is_empty() {
+ result.push('+');
+ }
+ result.push_str(&format!("?0x{:x}", unknown));
+ result
+ } else {
+ names.join("+")
+ }
+ }
+
+ /// Debug tool: parses semantic token JSON from LSP and prints human-readable output.
+ ///
+ /// Usage: Paste JSON into `json_input`, then run:
+ /// cargo test -p project debug_parse_tokens -- --nocapture --ignored
+ ///
+ /// Accepts either:
+ /// - Full LSP response: `{"jsonrpc":"2.0","id":1,"result":{"data":[...]}}`
+ /// - Just the data array: `[0,0,5,1,0,...]`
+ ///
+ /// For delta responses, paste multiple JSON messages (one per line) and they
+ /// will be applied in sequence.
+ ///
+ /// Token encoding (5 values per token):
+ /// [deltaLine, deltaStart, length, tokenType, tokenModifiers]
+ #[test]
+ #[ignore] // Run with: cargo test -p project debug_parse_tokens -- --nocapture --ignored
+ fn debug_parse_tokens() {
+ // ============================================================
+ // PASTE YOUR JSON HERE (one message per line for sequences)
+ // Comments starting with // are ignored
+ // ============================================================
+ let json_input = r#"
+// === EXAMPLE 1: Full response (LSP spec example) ===
+// 3 tokens: property at line 2, type at line 2, class at line 5
+{"jsonrpc":"2.0","id":1,"result":{"resultId":"1","data":[2,5,3,9,3,0,5,4,6,0,3,2,7,1,0]}}
+
+// === EXAMPLE 2: Delta response ===
+// User added empty line at start of file, so all tokens shift down by 1 line.
+// This changes first token's deltaLine from 2 to 3 (edit at index 0).
+{"jsonrpc":"2.0","id":2,"result":{"resultId":"2","edits":[{"start":0,"deleteCount":1,"data":[3]}]}}
+
+// === EXAMPLE 3: Another delta ===
+// User added a new token. Insert 5 values at position 5 (after first token).
+// New token: same line as token 1, 2 chars after it ends, len 5, type=function(12), mods=definition(2)
+{"jsonrpc":"2.0","id":3,"result":{"resultId":"3","edits":[{"start":5,"deleteCount":0,"data":[0,2,5,12,2]}]}}
+ "#;
+ // Accepted formats:
+ // - Full response: {"result":{"data":[...]}}
+ // - Delta response: {"result":{"edits":[{"start":N,"deleteCount":N,"data":[...]}]}}
+ // - Just array: [0,0,5,1,0,...]
+
+ // ============================================================
+ // PROCESSING
+ // ============================================================
+ let mut current_data: Vec<u32> = Vec::new();
+ let mut result_id: Option<String> = None;
+
+ for line in json_input.lines() {
+ let line = line.trim();
+ if line.is_empty() || line.starts_with("//") {
+ continue;
+ }
+
+ let parsed: serde_json::Value =
+ serde_json::from_str(line).expect("Failed to parse JSON");
+
+ // Try to extract data from various JSON shapes
+ let (data, edits, new_result_id) = extract_semantic_tokens(&parsed);
+
+ if let Some(new_id) = new_result_id {
+ result_id = Some(new_id);
+ }
+
+ if let Some(full_data) = data {
+ println!("\n{}", "=".repeat(70));
+ println!("FULL RESPONSE (resultId: {:?})", result_id);
+ current_data = full_data;
+ } else if let Some(delta_edits) = edits {
+ println!("\n{}", "=".repeat(70));
+ println!(
+ "DELTA RESPONSE: {} edit(s) (resultId: {:?})",
+ delta_edits.len(),
+ result_id
+ );
+ for (i, edit) in delta_edits.iter().enumerate() {
+ println!(
+ " [{}] start={}, delete={}, insert {} values",
+ i,
+ edit.start,
+ edit.delete_count,
+ edit.data.len()
+ );
+ }
+ let mut tokens = ServerSemanticTokens::from_full(current_data.clone(), None);
+ tokens.apply(&delta_edits);
+ current_data = tokens.data;
+ }
+ }
+
+ // Print parsed tokens
+ println!(
+ "\nDATA: {} values = {} tokens",
+ current_data.len(),
+ current_data.len() / 5
+ );
+ println!("\nPARSED TOKENS:");
+ println!("{:-<100}", "");
+ println!(
+ "{:>5} {:>6} {:>4} {:<15} {}",
+ "LINE", "START", "LEN", "TYPE", "MODIFIERS"
+ );
+ println!("{:-<100}", "");
+
+ let tokens = ServerSemanticTokens::from_full(current_data, None);
+ for token in tokens.tokens() {
+ println!(
+ "{:>5} {:>6} {:>4} {:<15} {}",
+ token.line,
+ token.start,
+ token.length,
+ token.token_type.0,
+ modifier_names(token.token_modifiers),
+ );
+ }
+ println!("{:-<100}", "");
+ println!("{}\n", "=".repeat(100));
+ }
+
+ fn extract_semantic_tokens(
+ value: &serde_json::Value,
+ ) -> (
+ Option<Vec<u32>>,
+ Option<Vec<SemanticTokensEdit>>,
+ Option<String>,
+ ) {
+ // Try as array directly: [1,2,3,...]
+ if let Some(arr) = value.as_array() {
+ let data: Vec<u32> = arr
+ .iter()
+ .filter_map(|v| v.as_u64().map(|n| n as u32))
+ .collect();
+ return (Some(data), None, None);
+ }
+
+ // Try as LSP response: {"result": {"data": [...]} } or {"result": {"edits": [...]}}
+ let result = value.get("result").unwrap_or(value);
+ let result_id = result
+ .get("resultId")
+ .and_then(|v| v.as_str())
+ .map(String::from);
+
+ // Full response with data
+ if let Some(data_arr) = result.get("data").and_then(|v| v.as_array()) {
+ let data: Vec<u32> = data_arr
+ .iter()
+ .filter_map(|v| v.as_u64().map(|n| n as u32))
+ .collect();
+ return (Some(data), None, result_id);
+ }
+
+ // Delta response with edits
+ if let Some(edits_arr) = result.get("edits").and_then(|v| v.as_array()) {
+ let edits: Vec<SemanticTokensEdit> = edits_arr
+ .iter()
+ .filter_map(|e| {
+ Some(SemanticTokensEdit {
+ start: e.get("start")?.as_u64()? as u32,
+ delete_count: e.get("deleteCount")?.as_u64()? as u32,
+ data: e
+ .get("data")
+ .and_then(|d| d.as_array())
+ .map(|arr| {
+ arr.iter()
+ .filter_map(|v| v.as_u64().map(|n| n as u32))
+ .collect()
+ })
+ .unwrap_or_default(),
+ })
+ })
+ .collect();
+ return (None, Some(edits), result_id);
+ }
+
+ (None, None, result_id)
+ }
+
+ #[test]
+ fn parses_sample_tokens() {
+ // Example from the spec: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_semanticTokens
+ let tokens = ServerSemanticTokens::from_full(
+ vec![2, 5, 3, 0, 3, 0, 5, 4, 1, 0, 3, 2, 7, 2, 0],
+ None,
+ )
+ .tokens()
+ .collect::<Vec<SemanticToken>>();
+
+ // The spec uses 1-based line numbers, and 0-based character numbers. This test uses 0-based for both.
+ assert_eq!(
+ tokens,
+ &[
+ SemanticToken {
+ line: 2,
+ start: 5,
+ length: 3,
+ token_type: TokenType(0),
+ token_modifiers: 3
+ },
+ SemanticToken {
+ line: 2,
+ start: 10,
+ length: 4,
+ token_type: TokenType(1),
+ token_modifiers: 0
+ },
+ SemanticToken {
+ line: 5,
+ start: 2,
+ length: 7,
+ token_type: TokenType(2),
+ token_modifiers: 0
+ }
+ ]
+ );
+ }
+
+ #[test]
+ fn applies_delta_edit() {
+ // Example from the spec: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_semanticTokens
+ // After a user types a new empty line at the beginning of the file,
+ // the tokens shift down by one line. The delta edit transforms
+ // [2,5,3,0,3, 0,5,4,1,0, 3,2,7,2,0] into [3,5,3,0,3, 0,5,4,1,0, 3,2,7,2,0]
+ // by replacing the first element (deltaLine of first token) from 2 to 3.
+
+ let mut tokens = ServerSemanticTokens::from_full(
+ vec![2, 5, 3, 0, 3, 0, 5, 4, 1, 0, 3, 2, 7, 2, 0],
+ None,
+ );
+
+ tokens.apply(&[SemanticTokensEdit {
+ start: 0,
+ delete_count: 1,
+ data: vec![3],
+ }]);
+
+ let result = tokens.tokens().collect::<Vec<SemanticToken>>();
+
+ assert_eq!(
+ result,
+ &[
+ SemanticToken {
+ line: 3,
+ start: 5,
+ length: 3,
+ token_type: TokenType(0),
+ token_modifiers: 3
+ },
+ SemanticToken {
+ line: 3,
+ start: 10,
+ length: 4,
+ token_type: TokenType(1),
+ token_modifiers: 0
+ },
+ SemanticToken {
+ line: 6,
+ start: 2,
+ length: 7,
+ token_type: TokenType(2),
+ token_modifiers: 0
+ }
+ ]
+ );
+ }
+}
@@ -386,6 +386,10 @@ pub enum Event {
server_id: LanguageServerId,
request_id: Option<usize>,
},
+ RefreshSemanticTokens {
+ server_id: LanguageServerId,
+ request_id: Option<usize>,
+ },
RefreshCodeLens,
RevealInProjectPanel(ProjectEntryId),
SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>),
@@ -3352,6 +3356,13 @@ impl Project {
server_id: *server_id,
request_id: *request_id,
}),
+ LspStoreEvent::RefreshSemanticTokens {
+ server_id,
+ request_id,
+ } => cx.emit(Event::RefreshSemanticTokens {
+ server_id: *server_id,
+ request_id: *request_id,
+ }),
LspStoreEvent::RefreshCodeLens => cx.emit(Event::RefreshCodeLens),
LspStoreEvent::LanguageServerPrompt(prompt) => {
cx.emit(Event::LanguageServerPrompt(prompt.clone()))
@@ -22,8 +22,8 @@ pub use settings::DirenvSettings;
pub use settings::LspSettings;
use settings::{
DapSettingsContent, EditorconfigEvent, InvalidSettingsError, LocalSettingsKind,
- LocalSettingsPath, RegisterSetting, Settings, SettingsLocation, SettingsStore,
- parse_json_with_comments, watch_config_file,
+ LocalSettingsPath, RegisterSetting, SemanticTokenRules, Settings, SettingsLocation,
+ SettingsStore, parse_json_with_comments, watch_config_file,
};
use std::{cell::OnceCell, collections::BTreeMap, path::PathBuf, sync::Arc, time::Duration};
use task::{DebugTaskFile, TaskTemplates, VsCodeDebugTaskFile, VsCodeTaskFile};
@@ -125,6 +125,9 @@ pub struct GlobalLspSettings {
/// Default: `true`
pub button: bool,
pub notifications: LspNotificationSettings,
+
+ /// Rules for highlighting semantic tokens.
+ pub semantic_token_rules: SemanticTokenRules,
}
#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema, Debug)]
@@ -636,6 +639,14 @@ impl Settings for ProjectSettings {
.unwrap()
.dismiss_timeout_ms,
},
+ semantic_token_rules: content
+ .global_lsp_settings
+ .as_ref()
+ .unwrap()
+ .semantic_token_rules
+ .as_ref()
+ .unwrap()
+ .clone(),
},
dap: project
.dap
@@ -146,6 +146,34 @@ message InlayHintsResponse {
repeated VectorClockEntry version = 2;
}
+message SemanticTokens {
+ uint64 project_id = 1;
+ uint64 buffer_id = 2;
+ optional uint64 for_server = 3;
+ repeated VectorClockEntry version = 4;
+}
+
+message SemanticTokensResponse {
+ // Only `data` or `edits` may be set.
+ // `oneof` does not allow `repeated` fields.
+ repeated uint32 data = 1;
+ repeated SemanticTokensEdit edits = 2;
+ optional string result_id = 3;
+ repeated VectorClockEntry version = 4;
+}
+
+message SemanticTokensEdit {
+ uint32 start = 1;
+ uint32 delete_count = 2;
+ repeated uint32 data = 3;
+}
+
+message RefreshSemanticTokens {
+ uint64 project_id = 1;
+ uint64 server_id = 2;
+ optional uint64 request_id = 3;
+}
+
message PointUtf16 {
uint32 row = 1;
uint32 column = 2;
@@ -820,6 +848,7 @@ message LspQuery {
GetTypeDefinition get_type_definition = 12;
GetImplementation get_implementation = 13;
InlayHints inlay_hints = 14;
+ SemanticTokens semantic_tokens = 16;
}
}
@@ -843,6 +872,7 @@ message LspResponse {
GetImplementationResponse get_implementation_response = 11;
GetReferencesResponse get_references_response = 12;
InlayHintsResponse inlay_hints_response = 13;
+ SemanticTokensResponse semantic_tokens_response = 14;
}
uint64 server_id = 7;
}
@@ -463,7 +463,11 @@ message Envelope {
DownloadFileByPath download_file_by_path = 415;
DownloadFileResponse download_file_response = 416;
- CreateFileForPeer create_file_for_peer = 417; // current max
+ CreateFileForPeer create_file_for_peer = 417;
+
+ SemanticTokens semantic_tokens = 418;
+ SemanticTokensResponse semantic_tokens_response = 419;
+ RefreshSemanticTokens refresh_semantic_tokens = 420; // current max
}
reserved 87 to 88;
@@ -135,6 +135,8 @@ messages!(
(IncomingCall, Foreground),
(InlayHints, Background),
(InlayHintsResponse, Background),
+ (SemanticTokens, Background),
+ (SemanticTokensResponse, Background),
(InstallExtension, Background),
(InviteChannelMember, Foreground),
(JoinChannel, Foreground),
@@ -199,7 +201,8 @@ messages!(
(PrepareRename, Background),
(PrepareRenameResponse, Background),
(ProjectEntryResponse, Foreground),
- (RefreshInlayHints, Foreground),
+ (RefreshInlayHints, Background),
+ (RefreshSemanticTokens, Background),
(RegisterBufferWithLanguageServers, Background),
(RejoinChannelBuffers, Foreground),
(RejoinChannelBuffersResponse, Foreground),
@@ -404,6 +407,7 @@ request_messages!(
(GetUsers, UsersResponse),
(IncomingCall, Ack),
(InlayHints, InlayHintsResponse),
+ (SemanticTokens, SemanticTokensResponse),
(GetCodeLens, GetCodeLensResponse),
(InviteChannelMember, Ack),
(JoinChannel, JoinRoomResponse),
@@ -427,6 +431,7 @@ request_messages!(
(Ping, Ack),
(PrepareRename, PrepareRenameResponse),
(RefreshInlayHints, Ack),
+ (RefreshSemanticTokens, Ack),
(RefreshCodeLens, Ack),
(RejoinChannelBuffers, RejoinChannelBuffersResponse),
(RejoinRoom, RejoinRoomResponse),
@@ -562,6 +567,7 @@ lsp_messages!(
(GetTypeDefinition, GetTypeDefinitionResponse, true),
(GetImplementation, GetImplementationResponse, true),
(InlayHints, InlayHintsResponse, false),
+ (SemanticTokens, SemanticTokensResponse, true)
);
entity_messages!(
@@ -606,6 +612,7 @@ entity_messages!(
OpenUncommittedDiff,
GetTypeDefinition,
InlayHints,
+ SemanticTokens,
JoinProject,
LeaveProject,
LinkedEditingRange,
@@ -624,6 +631,7 @@ entity_messages!(
PerformRename,
PrepareRename,
RefreshInlayHints,
+ RefreshSemanticTokens,
RefreshCodeLens,
ReloadBuffers,
RemoveProjectCollaborator,
@@ -913,6 +921,7 @@ impl LspQuery {
Some(lsp_query::Request::GetReferences(_)) => ("GetReferences", false),
Some(lsp_query::Request::GetDocumentColor(_)) => ("GetDocumentColor", false),
Some(lsp_query::Request::InlayHints(_)) => ("InlayHints", false),
+ Some(lsp_query::Request::SemanticTokens(_)) => ("SemanticTokens", false),
None => ("<unknown>", true),
}
}
@@ -376,6 +376,9 @@ impl AnyProtoClient {
Response::InlayHintsResponse(response) => {
to_any_envelope(&envelope, response)
}
+ Response::SemanticTokensResponse(response) => {
+ to_any_envelope(&envelope, response)
+ }
};
Some(proto::ProtoLspResponse {
server_id,
@@ -1,6 +1,6 @@
use crate::{
- BufferSearchBar, FocusSearch, NextHistoryQuery, PreviousHistoryQuery, ReplaceAll, ReplaceNext,
- SearchOption, SearchOptions, SearchSource, SelectNextMatch, SelectPreviousMatch,
+ BufferSearchBar, FocusSearch, HighlightKey, NextHistoryQuery, PreviousHistoryQuery, ReplaceAll,
+ ReplaceNext, SearchOption, SearchOptions, SearchSource, SelectNextMatch, SelectPreviousMatch,
ToggleCaseSensitive, ToggleIncludeIgnored, ToggleRegex, ToggleReplace, ToggleWholeWord,
buffer_search::Deploy,
search_bar::{
@@ -1467,7 +1467,7 @@ impl ProjectSearchView {
if match_ranges.is_empty() {
self.active_match_index = None;
self.results_editor.update(cx, |editor, cx| {
- editor.clear_background_highlights::<Self>(cx);
+ editor.clear_background_highlights(HighlightKey::ProjectSearchView, cx);
});
} else {
self.active_match_index = Some(0);
@@ -1524,7 +1524,8 @@ impl ProjectSearchView {
cx: &mut App,
) {
self.results_editor.update(cx, |editor, cx| {
- editor.highlight_background::<Self>(
+ editor.highlight_background(
+ HighlightKey::ProjectSearchView,
match_ranges,
move |index, theme| {
if active_index == Some(*index) {
@@ -1,5 +1,6 @@
use bitflags::bitflags;
pub use buffer_search::BufferSearchBar;
+pub use editor::HighlightKey;
use editor::SearchSettings;
use gpui::{Action, App, ClickEvent, FocusHandle, IntoElement, actions};
use project::search::SearchQuery;
@@ -141,6 +141,10 @@ pub fn default_settings() -> Cow<'static, str> {
asset_str::<SettingsAssets>("settings/default.json")
}
+pub fn default_semantic_token_rules() -> Cow<'static, str> {
+ asset_str::<SettingsAssets>("settings/default_semantic_token_rules.json")
+}
+
#[cfg(target_os = "macos")]
pub const DEFAULT_KEYMAP_PATH: &str = "keymaps/default-macos.json";
@@ -33,8 +33,8 @@ use crate::editorconfig_store::EditorconfigStore;
use crate::{
ActiveSettingsProfileName, FontFamilyName, IconThemeName, LanguageSettingsContent,
- LanguageToSettingsMap, LspSettings, LspSettingsMap, ThemeName, UserSettingsContentExt,
- VsCodeSettings, WorktreeId,
+ LanguageToSettingsMap, LspSettings, LspSettingsMap, SemanticTokenRules, ThemeName,
+ UserSettingsContentExt, VsCodeSettings, WorktreeId,
settings_content::{
ExtensionsSettingsContent, ProjectSettingsContent, RootUserSettings, SettingsContent,
UserSettingsContent, merge_from::MergeFrom,
@@ -151,6 +151,8 @@ pub struct SettingsStore {
extension_settings: Option<Box<SettingsContent>>,
server_settings: Option<Box<SettingsContent>>,
+ language_semantic_token_rules: HashMap<SharedString, SemanticTokenRules>,
+
merged_settings: Rc<SettingsContent>,
local_settings: BTreeMap<(WorktreeId, Arc<RelPath>), SettingsContent>,
@@ -272,11 +274,30 @@ pub struct SettingsJsonSchemaParams<'a> {
impl SettingsStore {
pub fn new(cx: &mut App, default_settings: &str) -> Self {
+ Self::new_with_semantic_tokens(cx, default_settings, &crate::default_semantic_token_rules())
+ }
+
+ pub fn new_with_semantic_tokens(
+ cx: &mut App,
+ default_settings: &str,
+ default_semantic_tokens: &str,
+ ) -> Self {
let (setting_file_updates_tx, mut setting_file_updates_rx) = mpsc::unbounded();
- let default_settings: Rc<SettingsContent> =
- SettingsContent::parse_json_with_comments(default_settings)
- .unwrap()
- .into();
+ let mut default_settings: SettingsContent =
+ SettingsContent::parse_json_with_comments(default_settings).unwrap();
+ if let Ok(semantic_token_rules) =
+ crate::parse_json_with_comments::<SemanticTokenRules>(default_semantic_tokens)
+ {
+ let global_lsp = default_settings
+ .global_lsp_settings
+ .get_or_insert_with(Default::default);
+ let existing_rules = global_lsp
+ .semantic_token_rules
+ .get_or_insert_with(Default::default);
+ existing_rules.rules.extend(semantic_token_rules.rules);
+ }
+
+ let default_settings: Rc<SettingsContent> = default_settings.into();
let mut this = Self {
setting_values: Default::default(),
default_settings: default_settings.clone(),
@@ -284,6 +305,7 @@ impl SettingsStore {
server_settings: None,
user_settings: None,
extension_settings: None,
+ language_semantic_token_rules: HashMap::default(),
merged_settings: default_settings,
local_settings: BTreeMap::default(),
@@ -842,6 +864,29 @@ impl SettingsStore {
Ok(())
}
+ /// Sets language-specific semantic token rules.
+ ///
+ /// These rules are registered by language modules (e.g. the Rust language module)
+ /// and are stored separately from the global rules. They are only applied to
+ /// buffers of the matching language by the `SemanticTokenStylizer`.
+ ///
+ /// These should be registered before any `SemanticTokenStylizer` instances are
+ /// created (typically during `languages::init`), as existing cached stylizers
+ /// are not automatically invalidated.
+ pub fn set_language_semantic_token_rules(
+ &mut self,
+ language: SharedString,
+ rules: SemanticTokenRules,
+ ) {
+ self.language_semantic_token_rules.insert(language, rules);
+ }
+
+ /// Returns the language-specific semantic token rules for the given language,
+ /// if any have been registered.
+ pub fn language_semantic_token_rules(&self, language: &str) -> Option<&SemanticTokenRules> {
+ self.language_semantic_token_rules.get(language)
+ }
+
/// Add or remove a set of local settings via a JSON string.
pub fn set_local_settings(
&mut self,
@@ -2,6 +2,7 @@ use crate::*;
use anyhow::{Context as _, Result, anyhow};
use collections::HashMap;
use fs::Fs;
+use gpui::Rgba;
use paths::{cursor_settings_file_paths, vscode_settings_file_paths};
use serde::Deserialize;
use serde_json::{Map, Value};
@@ -186,7 +187,10 @@ impl VsCodeSettings {
file_finder: None,
git: self.git_settings_content(),
git_panel: self.git_panel_settings_content(),
- global_lsp_settings: None,
+ global_lsp_settings: skip_default(GlobalLspSettingsContent {
+ semantic_token_rules: self.semantic_token_rules(),
+ ..GlobalLspSettingsContent::default()
+ }),
helix_mode: None,
image_viewer: None,
journal: None,
@@ -357,6 +361,105 @@ impl VsCodeSettings {
})
}
+ fn semantic_token_rules(&self) -> Option<SemanticTokenRules> {
+ let customizations = self
+ .read_value("editor.semanticTokenColorCustomizations")?
+ .as_object()?;
+
+ skip_default(SemanticTokenRules {
+ rules: customizations
+ .get("rules")
+ .and_then(|v| {
+ Some(
+ v.as_object()?
+ .iter()
+ .filter_map(|(k, v)| {
+ let v = v.as_object()?;
+
+ let mut underline = v
+ .get("underline")
+ .and_then(|b| b.as_bool())
+ .unwrap_or(false);
+ let strikethrough = v
+ .get("strikethrough")
+ .and_then(|b| b.as_bool())
+ .unwrap_or(false);
+ let mut font_weight =
+ v.get("bold").and_then(|b| b.as_bool()).map(|b| {
+ if b {
+ SemanticTokenFontWeight::Bold
+ } else {
+ SemanticTokenFontWeight::Normal
+ }
+ });
+ let mut font_style =
+ v.get("italic").and_then(|b| b.as_bool()).map(|b| {
+ if b {
+ SemanticTokenFontStyle::Italic
+ } else {
+ SemanticTokenFontStyle::Normal
+ }
+ });
+
+ match v.get("fontStyle").and_then(|s| s.as_str()).unwrap_or("") {
+ "bold" => {
+ font_style = Some(SemanticTokenFontStyle::Normal);
+ font_weight = Some(SemanticTokenFontWeight::Bold);
+ }
+ "italic" => {
+ font_style = Some(SemanticTokenFontStyle::Italic);
+ font_weight = Some(SemanticTokenFontWeight::Normal);
+ }
+ "underline" => {
+ underline = true;
+ }
+ "bold italic" | "italic bold" => {
+ font_style = Some(SemanticTokenFontStyle::Italic);
+ font_weight = Some(SemanticTokenFontWeight::Bold);
+ }
+ "normal" => {
+ font_style = Some(SemanticTokenFontStyle::Normal);
+ font_weight = Some(SemanticTokenFontWeight::Normal);
+ }
+ _ => {}
+ }
+
+ let foreground = v
+ .get("foreground")
+ .and_then(|v| Rgba::try_from(v.as_str()?).ok())
+ .map(|s| s.to_owned());
+ let background = v
+ .get("background")
+ .and_then(|v| Rgba::try_from(v.as_str()?).ok())
+ .map(|s| s.to_owned());
+
+ Some(SemanticTokenRule {
+ token_type: Some(k.clone()),
+ token_modifiers: vec![],
+ style: vec![],
+ underline: if underline {
+ Some(SemanticTokenColorOverride::InheritForeground(true))
+ } else {
+ None
+ },
+ strikethrough: if strikethrough {
+ Some(SemanticTokenColorOverride::InheritForeground(true))
+ } else {
+ None
+ },
+ foreground_color: foreground,
+ background_color: background,
+ font_weight,
+ font_style,
+ })
+ })
+ .collect(),
+ )
+ })
+ .unwrap_or_default(),
+ })
+ }
+
fn minimap_content(&self) -> Option<MinimapContent> {
let minimap_enabled = self.read_bool("editor.minimap.enabled");
let autohide = self.read_bool("editor.minimap.autohide");
@@ -449,6 +552,15 @@ impl VsCodeSettings {
inlay_hints: None,
jsx_tag_auto_close: None,
language_servers: None,
+ semantic_tokens: self
+ .read_bool("editor.semanticHighlighting.enabled")
+ .map(|enabled| {
+ if enabled {
+ SemanticTokens::Full
+ } else {
+ SemanticTokens::Off
+ }
+ }),
linked_edits: self.read_bool("editor.linkedEditing"),
preferred_line_length: self.read_u32("editor.wordWrapColumn"),
prettier: None,
@@ -18,6 +18,7 @@ default = []
anyhow.workspace = true
collections.workspace = true
derive_more.workspace = true
+gpui.workspace = true
log.workspace = true
schemars.workspace = true
serde.workspace = true
@@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize, de::Error as _};
use settings_macros::{MergeFrom, with_fallible_options};
use std::sync::Arc;
-use crate::{ExtendingVec, merge_from};
+use crate::{ExtendingVec, SemanticTokens, merge_from};
/// The state of the modifier keys at some point in time
#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom)]
@@ -422,6 +422,15 @@ pub struct LanguageSettingsContent {
///
/// Default: ["..."]
pub language_servers: Option<Vec<String>>,
+ /// Controls how semantic tokens from language servers are used for syntax highlighting.
+ ///
+ /// Options:
+ /// - "off": Do not request semantic tokens from language servers.
+ /// - "combined": Use LSP semantic tokens together with tree-sitter highlighting.
+ /// - "full": Use LSP semantic tokens exclusively, replacing tree-sitter highlighting.
+ ///
+ /// Default: "off"
+ pub semantic_tokens: Option<SemanticTokens>,
/// Controls where the `editor::Rewrap` action is allowed for this language.
///
/// Note: This setting has no effect in Vim mode, as rewrap is already
@@ -1,6 +1,7 @@
use std::{path::PathBuf, sync::Arc};
use collections::{BTreeMap, HashMap};
+use gpui::Rgba;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings_json::parse_json_with_comments;
@@ -193,7 +194,7 @@ pub struct FetchSettings {
/// Common language server settings.
#[with_fallible_options]
-#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom)]
+#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize, JsonSchema, MergeFrom)]
pub struct GlobalLspSettingsContent {
/// Whether to show the LSP servers button in the status bar.
///
@@ -201,6 +202,8 @@ pub struct GlobalLspSettingsContent {
pub button: Option<bool>,
/// Settings for language server notifications
pub notifications: Option<LspNotificationSettingsContent>,
+ /// Rules for rendering LSP semantic tokens.
+ pub semantic_token_rules: Option<SemanticTokenRules>,
}
#[with_fallible_options]
@@ -213,6 +216,84 @@ pub struct LspNotificationSettingsContent {
pub dismiss_timeout_ms: Option<u64>,
}
+/// Custom rules for rendering LSP semantic tokens.
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, JsonSchema)]
+#[serde(transparent)]
+pub struct SemanticTokenRules {
+ pub rules: Vec<SemanticTokenRule>,
+}
+
+impl crate::merge_from::MergeFrom for SemanticTokenRules {
+ fn merge_from(&mut self, other: &Self) {
+ self.rules.splice(0..0, other.rules.iter().cloned());
+ }
+}
+
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub struct SemanticTokenRule {
+ pub token_type: Option<String>,
+ #[serde(default)]
+ pub token_modifiers: Vec<String>,
+ #[serde(default)]
+ pub style: Vec<String>,
+ pub foreground_color: Option<Rgba>,
+ pub background_color: Option<Rgba>,
+ pub underline: Option<SemanticTokenColorOverride>,
+ pub strikethrough: Option<SemanticTokenColorOverride>,
+ pub font_weight: Option<SemanticTokenFontWeight>,
+ pub font_style: Option<SemanticTokenFontStyle>,
+}
+
+#[derive(Copy, Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, MergeFrom)]
+#[serde(untagged)]
+pub enum SemanticTokenColorOverride {
+ InheritForeground(bool),
+ Replace(Rgba),
+}
+
+#[derive(
+ Copy,
+ Clone,
+ Debug,
+ Default,
+ Serialize,
+ Deserialize,
+ PartialEq,
+ Eq,
+ JsonSchema,
+ MergeFrom,
+ strum::VariantArray,
+ strum::VariantNames,
+)]
+#[serde(rename_all = "snake_case")]
+pub enum SemanticTokenFontWeight {
+ #[default]
+ Normal,
+ Bold,
+}
+
+#[derive(
+ Copy,
+ Clone,
+ Debug,
+ Default,
+ Serialize,
+ Deserialize,
+ PartialEq,
+ Eq,
+ JsonSchema,
+ MergeFrom,
+ strum::VariantArray,
+ strum::VariantNames,
+)]
+#[serde(rename_all = "snake_case")]
+pub enum SemanticTokenFontStyle {
+ #[default]
+ Normal,
+ Italic,
+}
+
#[with_fallible_options]
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, MergeFrom)]
#[serde(rename_all = "snake_case")]
@@ -790,3 +790,42 @@ pub enum ProjectPanelSortMode {
pub struct ProjectPanelIndentGuidesSettings {
pub show: Option<ShowIndentGuides>,
}
+
+/// Controls how semantic tokens from language servers are used for syntax highlighting.
+#[derive(
+ Debug,
+ PartialEq,
+ Eq,
+ Clone,
+ Copy,
+ Default,
+ Serialize,
+ Deserialize,
+ JsonSchema,
+ MergeFrom,
+ strum::VariantArray,
+ strum::VariantNames,
+)]
+#[serde(rename_all = "snake_case")]
+pub enum SemanticTokens {
+ /// Do not request semantic tokens from language servers.
+ #[default]
+ Off,
+ /// Use LSP semantic tokens together with tree-sitter highlighting.
+ Combined,
+ /// Use LSP semantic tokens exclusively, replacing tree-sitter highlighting.
+ Full,
+}
+
+impl SemanticTokens {
+ /// Returns true if semantic tokens should be requested from language servers.
+ pub fn enabled(&self) -> bool {
+ self != &Self::Off
+ }
+
+ /// Returns true if tree-sitter syntax highlighting should be used.
+ /// In `full` mode, tree-sitter is disabled in favor of LSP semantic tokens.
+ pub fn use_tree_sitter(&self) -> bool {
+ self != &Self::Full
+ }
+}
@@ -68,7 +68,7 @@ pub(crate) fn settings_data(cx: &App) -> Vec<SettingsPage> {
}
fn general_page() -> SettingsPage {
- fn general_settings_section() -> [SettingsPageItem; 8] {
+ fn general_settings_section() -> [SettingsPageItem; 9] {
[
SettingsPageItem::SectionHeader("General Settings"),
SettingsPageItem::SettingItem(SettingItem {
@@ -187,6 +187,30 @@ fn general_page() -> SettingsPage {
metadata: None,
files: USER,
}),
+ SettingsPageItem::SettingItem(SettingItem {
+ title: "Semantic Tokens",
+ description: "If semantic tokens from language servers should be rendered.",
+ field: Box::new(SettingField {
+ json_path: Some("semantic_tokens"),
+ pick: |settings_content| {
+ settings_content
+ .project
+ .all_languages
+ .defaults
+ .semantic_tokens
+ .as_ref()
+ },
+ write: |settings_content, value| {
+ settings_content
+ .project
+ .all_languages
+ .defaults
+ .semantic_tokens = value;
+ },
+ }),
+ metadata: None,
+ files: USER,
+ }),
]
}
fn security_section() -> [SettingsPageItem; 2] {
@@ -537,6 +537,7 @@ fn init_renderers(cx: &mut App) {
.add_basic_renderer::<settings::WindowDecorations>(render_dropdown)
.add_basic_renderer::<settings::FontSize>(render_editable_number_field)
.add_basic_renderer::<settings::OllamaModelName>(render_ollama_model_picker)
+ .add_basic_renderer::<settings::SemanticTokens>(render_dropdown)
// please semicolon stay on next line
;
}
@@ -5,7 +5,8 @@ use crate::{
use std::{cmp::Ordering, fmt::Debug, ops::Range};
use sum_tree::{Bias, Dimensions};
-/// A timestamped position in a buffer
+/// A timestamped position in a buffer.
+#[doc(alias = "TextAnchor")]
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Anchor {
/// The timestamp of the operation that inserted the text
@@ -334,7 +334,7 @@ pub(crate) fn zed_default_dark() -> Theme {
..HighlightStyle::default()
},
),
- ("enum".into(), HighlightStyle::default()),
+ ("enum".into(), teal.into()),
("function".into(), blue.into()),
("function.method".into(), blue.into()),
("function.definition".into(), blue.into()),
@@ -42,6 +42,12 @@ impl SyntaxTheme {
.unwrap_or_default()
}
+ pub fn get_opt(&self, name: &str) -> Option<HighlightStyle> {
+ self.highlights
+ .iter()
+ .find_map(|entry| if entry.0 == name { Some(entry.1) } else { None })
+ }
+
pub fn color(&self, name: &str) -> Hsla {
self.get(name).color.unwrap_or_default()
}
@@ -7,14 +7,12 @@ use crate::{
state::{Mode, Register},
};
use collections::HashMap;
-use editor::{ClipboardSelection, Editor, SelectionEffects};
+use editor::{ClipboardSelection, Editor, HighlightKey, SelectionEffects};
use gpui::Context;
use gpui::Window;
use language::Point;
use settings::Settings;
-struct HighlightOnYank;
-
impl Vim {
pub fn yank_motion(
&mut self,
@@ -227,7 +225,8 @@ impl Vim {
return;
}
- editor.highlight_background::<HighlightOnYank>(
+ editor.highlight_background(
+ HighlightKey::HighlightOnYank,
&ranges_to_highlight,
|_, colors| colors.colors().editor_document_highlight_read_background,
cx,
@@ -237,7 +236,7 @@ impl Vim {
.timer(Duration::from_millis(highlight_duration))
.await;
this.update(cx, |editor, cx| {
- editor.clear_background_highlights::<HighlightOnYank>(cx)
+ editor.clear_background_highlights(HighlightKey::HighlightOnYank, cx)
})
.ok();
})
@@ -5,7 +5,7 @@ use crate::{
state::Mode,
};
use editor::{
- Anchor, Bias, Editor, EditorSnapshot, SelectionEffects, ToOffset, ToPoint,
+ Anchor, Bias, Editor, EditorSnapshot, HighlightKey, SelectionEffects, ToOffset, ToPoint,
display_map::ToDisplayPoint,
};
use gpui::{ClipboardEntry, Context, Window, actions};
@@ -40,8 +40,6 @@ pub fn register(editor: &mut Editor, cx: &mut Context<Vim>) {
});
}
-struct VimExchange;
-
impl Vim {
pub(crate) fn multi_replace(
&mut self,
@@ -181,7 +179,7 @@ impl Vim {
pub fn clear_exchange(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.stop_recording(cx);
self.update_editor(cx, |_, editor, cx| {
- editor.clear_background_highlights::<VimExchange>(cx);
+ editor.clear_background_highlights(HighlightKey::VimExchange, cx);
});
self.clear_operator(window, cx);
}
@@ -229,7 +227,8 @@ impl Vim {
window: &mut Window,
cx: &mut Context<Editor>,
) {
- if let Some((_, ranges)) = editor.clear_background_highlights::<VimExchange>(cx) {
+ if let Some((_, ranges)) = editor.clear_background_highlights(HighlightKey::VimExchange, cx)
+ {
let previous_range = ranges[0].clone();
let new_range_start = new_range.start.to_offset(&snapshot.buffer_snapshot());
@@ -271,7 +270,8 @@ impl Vim {
}
} else {
let ranges = [new_range];
- editor.highlight_background::<VimExchange>(
+ editor.highlight_background(
+ HighlightKey::VimExchange,
&ranges,
|_, theme| theme.colors().editor_document_highlight_read_background,
cx,
@@ -902,7 +902,7 @@ fn assert_pending_input(cx: &mut VimTestContext, expected: &str) {
cx.update_editor(|editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
let highlights = editor
- .text_highlights::<editor::PendingInput>(cx)
+ .text_highlights(editor::HighlightKey::PendingInput, cx)
.unwrap()
.1;
let (_, ranges) = marked_text_ranges(expected, false);
@@ -966,7 +966,7 @@ async fn test_jk_delay(cx: &mut gpui::TestAppContext) {
cx.update_editor(|editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
let highlights = editor
- .text_highlights::<editor::PendingInput>(cx)
+ .text_highlights(editor::HighlightKey::PendingInput, cx)
.unwrap()
.1;
@@ -25,6 +25,7 @@ test-support = [
"image_viewer/test-support",
"recent_projects/test-support",
"repl/test-support",
+ "title_bar/test-support",
]
visual-tests = [
"gpui/test-support",
@@ -250,6 +251,7 @@ semver.workspace = true
terminal_view = { workspace = true, features = ["test-support"] }
tree-sitter-md.workspace = true
tree-sitter-rust.workspace = true
+title_bar = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
image.workspace = true
agent_ui = { workspace = true, features = ["test-support"] }
@@ -123,6 +123,8 @@ actions!(
OpenTasks,
/// Opens debug tasks configuration.
OpenDebugTasks,
+ /// Shows the default semantic token rules (read-only).
+ ShowDefaultSemanticTokenRules,
/// Resets the application database.
ResetDatabase,
/// Shows all hidden windows.
@@ -242,6 +244,18 @@ pub fn init(cx: &mut App) {
);
});
})
+ .on_action(|_: &ShowDefaultSemanticTokenRules, cx| {
+ with_active_or_new_workspace(cx, |workspace, window, cx| {
+ open_bundled_file(
+ workspace,
+ settings::default_semantic_token_rules(),
+ "Default Semantic Token Rules",
+ "JSONC",
+ window,
+ cx,
+ );
+ });
+ })
.on_action(|_: &OpenDefaultSettings, cx| {
with_active_or_new_workspace(cx, |workspace, window, cx| {
open_bundled_file(
@@ -1267,6 +1281,9 @@ fn initialize_pane(
toolbar.add_item(telemetry_log_item, window, cx);
let syntax_tree_item = cx.new(|_| language_tools::SyntaxTreeToolbarItemView::new());
toolbar.add_item(syntax_tree_item, window, cx);
+ let highlights_tree_item =
+ cx.new(|_| language_tools::HighlightsTreeToolbarItemView::new());
+ toolbar.add_item(highlights_tree_item, window, cx);
let migration_banner = cx.new(|cx| MigrationBanner::new(workspace, cx));
toolbar.add_item(migration_banner, window, cx);
let project_diff_toolbar = cx.new(|cx| ProjectDiffToolbar::new(workspace, cx));
@@ -4835,6 +4852,7 @@ mod tests {
"git_panel",
"git_picker",
"go_to_line",
+ "highlights_tree_view",
"icon_theme_selector",
"image_viewer",
"inline_assistant",
@@ -114,7 +114,8 @@ impl Render for QuickActionBar {
let selection_menu_enabled = editor_value.selection_menu_enabled(cx);
let inlay_hints_enabled = editor_value.inlay_hints_enabled();
let inline_values_enabled = editor_value.inline_values_enabled();
- let supports_diagnostics = editor_value.mode().is_full();
+ let semantic_highlights_enabled = editor_value.semantic_highlights_enabled();
+ let is_full = editor_value.mode().is_full();
let diagnostics_enabled = editor_value.diagnostics_max_severity != DiagnosticSeverity::Off;
let supports_inline_diagnostics = editor_value.inline_diagnostics_enabled();
let inline_diagnostics_enabled = editor_value.show_inline_diagnostics();
@@ -378,6 +379,29 @@ impl Render for QuickActionBar {
);
}
+ if is_full {
+ menu = menu.toggleable_entry(
+ "Semantic Highlights",
+ semantic_highlights_enabled,
+ IconPosition::Start,
+ Some(editor::actions::ToggleSemanticHighlights.boxed_clone()),
+ {
+ let editor = editor.clone();
+ move |window, cx| {
+ editor
+ .update(cx, |editor, cx| {
+ editor.toggle_semantic_highlights(
+ &editor::actions::ToggleSemanticHighlights,
+ window,
+ cx,
+ );
+ })
+ .ok();
+ }
+ },
+ );
+ }
+
if supports_minimap {
menu = menu.toggleable_entry("Minimap", minimap_enabled, IconPosition::Start, Some(editor::actions::ToggleMinimap.boxed_clone()), {
let editor = editor.clone();
@@ -426,7 +450,7 @@ impl Render for QuickActionBar {
menu = menu.separator();
- if supports_diagnostics {
+ if is_full {
menu = menu.toggleable_entry(
"Diagnostics",
diagnostics_enabled,
@@ -78,6 +78,7 @@
- [All Languages](./languages.md)
- [Configuring Languages](./configuring-languages.md)
- [Toolchains](./toolchains.md)
+ - [Semantic Tokens](./semantic-tokens.md)
- [Ansible](./languages/ansible.md)
- [AsciiDoc](./languages/asciidoc.md)
- [Astro](./languages/astro.md)
@@ -409,6 +409,22 @@ To create your own theme extension, refer to the [Developing Theme Extensions](.
## Using Language Server Features
+### Semantic Tokens
+
+Semantic tokens provide richer syntax highlighting by using type and scope information from language servers. Enable them with the `semantic_tokens` setting:
+
+```json [settings]
+"semantic_tokens": "combined"
+```
+
+- `"off"` — Tree-sitter highlighting only (default)
+- `"combined"` — LSP semantic tokens overlaid on tree-sitter
+- `"full"` — LSP semantic tokens replace tree-sitter entirely
+
+You can customize token colors and styles through `global_lsp_settings.semantic_token_rules` in your settings.
+
+→ [Semantic Tokens documentation](./semantic-tokens.md)
+
### Inlay Hints
Inlay hints provide additional information inline in your code, such as parameter names or inferred types. Configure inlay hints in your `settings.json`:
@@ -405,6 +405,73 @@ impl zed::Extension for MyExtension {
You can customize the handling of the language server using several optional methods in the `Extension` trait. For example, you can control how completions are styled using the `label_for_completion` method. For a complete list of methods, see the [API docs for the Zed extension API](https://docs.rs/zed_extension_api).
+### Syntax Highlighting with Semantic Tokens
+
+Zed supports syntax highlighting using semantic tokens from the attached language servers. This is currently disabled by default, but can be enabled in your settings file:
+
+```json [settings]
+{
+  // Enable semantic tokens globally, backing with tree-sitter highlights for each language:
+ "semantic_tokens": "combined",
+ // Or, specify per-language:
+ "languages": {
+ "Rust": {
+ // No tree-sitter, only LSP semantic tokens:
+ "semantic_tokens": "full"
+ }
+ }
+}
+```
+
+The `semantic_tokens` setting accepts the following values:
+
+- `"off"` (default): Do not request semantic tokens from language servers.
+- `"combined"`: Use LSP semantic tokens together with tree-sitter highlighting.
+- `"full"`: Use LSP semantic tokens exclusively, replacing tree-sitter highlighting.
+
+#### Customizing Semantic Token Styles
+
+Zed supports customizing the styles used for semantic tokens. You can define rules in your settings file, which customize how semantic tokens get mapped to styles in your theme.
+
+```json [settings]
+{
+ "global_lsp_settings": {
+ "semantic_token_rules": [
+ {
+ // Highlight macros as keywords.
+ "token_type": "macro",
+ "style": ["syntax.keyword"]
+ },
+ {
+ // Highlight unresolved references in bold red.
+ "token_type": "unresolvedReference",
+ "foreground_color": "#c93f3f",
+ "font_weight": "bold"
+ },
+ {
+ // Underline all mutable variables/references/etc.
+ "token_modifiers": ["mutable"],
+ "underline": true
+ }
+ ]
+ }
+}
+```
+
+All rules that match a given `token_type` and `token_modifiers` are applied. Earlier rules take precedence. If no rules match, the token is not highlighted. User-defined rules take priority over the default rules.
+
+Each rule in the `semantic_token_rules` array is defined as follows:
+
+- `token_type`: The semantic token type as defined by the [LSP specification](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_semanticTokens). If omitted, the rule matches all token types.
+- `token_modifiers`: A list of semantic token modifiers to match. All modifiers must be present to match.
+- `style`: A list of styles from the current syntax theme to use. The first style found is used. Any settings below override that style.
+- `foreground_color`: The foreground color to use for the token type, in hex format (e.g., `"#ff0000"`).
+- `background_color`: The background color to use for the token type, in hex format (e.g., `"#ff0000"`).
+- `underline`: A boolean or color to underline with, in hex format. If `true`, then the token will be underlined with the text color.
+- `strikethrough`: A boolean or color to strikethrough with, in hex format. If `true`, then the token will have a strikethrough with the text color.
+- `font_weight`: One of `"normal"`, `"bold"`.
+- `font_style`: One of `"normal"`, `"italic"`.
+
### Multi-Language Support
If your language server supports additional languages, you can use `language_ids` to map Zed `languages` to the desired [LSP-specific `languageId`](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentItem) identifiers:
@@ -2592,6 +2592,7 @@ The following settings can be overridden for each specific language:
- [`hard_tabs`](#hard-tabs)
- [`preferred_line_length`](#preferred-line-length)
- [`remove_trailing_whitespace_on_save`](#remove-trailing-whitespace-on-save)
+- [`semantic_tokens`](#semantic-tokens)
- [`show_edit_predictions`](#show-edit-predictions)
- [`show_whitespaces`](#show-whitespaces)
- [`whitespace_map`](#whitespace-map)
@@ -3297,6 +3298,40 @@ Non-negative `integer` values
2. `selection` only populates the search query when there is text selected
3. `never` never populates the search query
+## Semantic Tokens
+
+- Description: Controls how semantic tokens from language servers are used for syntax highlighting.
+- Setting: `semantic_tokens`
+- Default: `off`
+
+**Options**
+
+1. `off`: Do not request semantic tokens from language servers.
+2. `combined`: Use LSP semantic tokens together with tree-sitter highlighting.
+3. `full`: Use LSP semantic tokens exclusively, replacing tree-sitter highlighting.
+
+To enable semantic tokens globally:
+
+```json [settings]
+{
+ "semantic_tokens": "combined"
+}
+```
+
+To enable semantic tokens for a specific language:
+
+```json [settings]
+{
+ "languages": {
+ "Rust": {
+ "semantic_tokens": "full"
+ }
+ }
+}
+```
+
+May require language server restart to properly apply.
+
## Use Smartcase Search
- Description: When enabled, automatically adjusts search case sensitivity based on your query. If your search query contains any uppercase letters, the search becomes case-sensitive; if it contains only lowercase letters, the search becomes case-insensitive. \
@@ -0,0 +1,199 @@
+# Semantic Tokens
+
+Semantic tokens provide richer syntax highlighting by using information from language servers. Unlike tree-sitter highlighting, which is based purely on syntax, semantic tokens understand the meaning of your code—distinguishing between local variables and parameters, or between a class definition and a class reference.
+
+## Enabling Semantic Tokens
+
+Semantic tokens are controlled by the `semantic_tokens` setting. By default, semantic tokens are disabled.
+
+```json [settings]
+{
+ "semantic_tokens": "combined"
+}
+```
+
+This setting accepts three values:
+
+| Value | Description |
+| ------------ | ----------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| `"off"` | Do not request semantic tokens from language servers. Uses tree-sitter highlighting only. (Default) |
+| `"combined"` | Use LSP semantic tokens together with tree-sitter highlighting. Tree-sitter provides base highlighting, and semantic tokens overlay additional information. |
+| `"full"` | Use LSP semantic tokens exclusively. Tree-sitter highlighting is disabled entirely for buffers with semantic token support. |
+
+You can configure this globally or per-language:
+
+```json [settings]
+{
+ "semantic_tokens": "off",
+ "languages": {
+ "Rust": {
+ "semantic_tokens": "combined"
+ },
+ "TypeScript": {
+ "semantic_tokens": "full"
+ }
+ }
+}
+```
+
+> **Note:** Changing the `semantic_tokens` mode may require a language server restart to take effect. Use the `lsp: restart language servers` command from the command palette if highlighting doesn't update immediately.
+
+## Customizing Token Colors
+
+Semantic tokens are styled using rules that map LSP token types and modifiers to theme styles or custom colors. Zed provides sensible defaults, but you can customize these in your `settings.json` by adding rules under the `global_lsp_settings.semantic_token_rules` key.
+
+Rules are matched in order, and the first matching rule wins.
+User-defined rules take precedence over defaults.
+
+### Rule Structure
+
+Each rule can specify:
+
+| Property | Description |
+| ------------------ | ------------------------------------------------------------------------------------------------------------------ |
+| `token_type` | The LSP semantic token type to match (e.g., `"variable"`, `"function"`, `"class"`). If omitted, matches all types. |
+| `token_modifiers` | A list of modifiers that must all be present (e.g., `["declaration"]`, `["readonly", "static"]`). |
+| `style` | A list of theme style names to try. The first one found in the current theme is used. |
+| `foreground_color` | Override foreground color in hex format (e.g., `"#ff0000"`). |
+| `background_color` | Override background color in hex format. |
+| `underline` | Boolean or hex color. If `true`, underlines with the text color. |
+| `strikethrough` | Boolean or hex color. If `true`, strikes through with the text color. |
+| `font_weight` | `"normal"` or `"bold"`. |
+| `font_style` | `"normal"` or `"italic"`. |
+
+### Example: Highlighting Unresolved References
+
+To make unresolved references stand out:
+
+```json [settings]
+{
+ "global_lsp_settings": {
+ "semantic_token_rules": [
+ {
+ "token_type": "unresolvedReference",
+ "foreground_color": "#c93f3f",
+ "font_weight": "bold"
+ }
+ ]
+ }
+}
+```
+
+### Example: Highlighting Unsafe Code
+
+To highlight unsafe operations in Rust:
+
+```json [settings]
+{
+ "global_lsp_settings": {
+ "semantic_token_rules": [
+ {
+ "token_type": "punctuation",
+ "token_modifiers": ["unsafe"],
+ "foreground_color": "#AA1111",
+ "font_weight": "bold"
+ }
+ ]
+ }
+}
+```
+
+### Example: Using Theme Styles
+
+Instead of hardcoding colors, reference styles from your theme:
+
+```json [settings]
+{
+ "global_lsp_settings": {
+ "semantic_token_rules": [
+ {
+ "token_type": "variable",
+ "token_modifiers": ["mutable"],
+ "style": ["variable.mutable", "variable"]
+ }
+ ]
+ }
+}
+```
+
+The first style found in the current theme is used, providing fallback options.
+
+### Example: Disabling a Token Type
+
+To disable highlighting for a specific token type, add an empty rule that matches it:
+
+```json [settings]
+{
+ "global_lsp_settings": {
+ "semantic_token_rules": [
+ {
+ "token_type": "comment"
+ }
+ ]
+ }
+}
+```
+
+Since user rules are prepended to defaults and the first match wins, this empty rule prevents any styling from being applied to comment tokens.
+
+## Default Rules
+
+Zed's default semantic token rules map standard LSP token types to common theme styles. For example:
+
+- `function` β `function` style
+- `variable` with `constant` modifier β `constant` style
+- `class` β `type.class`, `class`, or `type` style (first found)
+- `comment` with `documentation` modifier β `comment.documentation` or `comment.doc` style
+
+The full default configuration can be viewed in Zed with the `zed: show default semantic token rules` command.
+
+## Standard Token Types
+
+Language servers report tokens using standardized types. Common types include:
+
+| Type | Description |
+| --------------- | ---------------------------------- |
+| `namespace` | Namespace or module names |
+| `type` | Type names |
+| `class` | Class names |
+| `enum` | Enum type names |
+| `interface` | Interface names |
+| `struct` | Struct names |
+| `typeParameter` | Generic type parameters |
+| `parameter` | Function/method parameters |
+| `variable` | Variable names |
+| `property` | Object properties or struct fields |
+| `enumMember` | Enum variants |
+| `function` | Function names |
+| `method` | Method names |
+| `macro` | Macro names |
+| `keyword` | Language keywords |
+| `comment` | Comments |
+| `string` | String literals |
+| `number` | Numeric literals |
+| `operator` | Operators |
+
+Common modifiers include: `declaration`, `definition`, `readonly`, `static`, `deprecated`, `async`, `documentation`, `defaultLibrary`, and language-specific modifiers like `unsafe` (Rust) or `abstract` (TypeScript).
+
+For the complete specification, see the [LSP Semantic Tokens documentation](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#semanticTokenTypes).
+
+## Inspecting Semantic Tokens
+
+To see semantic tokens applied to your code in real-time, use the `dev: open highlights tree view` command from the command palette. This opens a panel showing all highlights (including semantic tokens) for the current buffer, making it easier to understand which tokens are being applied and debug your custom rules.
+
+## Troubleshooting
+
+### Semantic highlighting not appearing
+
+1. Ensure `semantic_tokens` is set to `"combined"` or `"full"` for the language
+2. Verify the language server supports semantic tokens (not all do)
+3. Try restarting the language server with `lsp: restart language servers`
+4. Check the LSP logs (`workspace: open lsp log`) for errors
+
+### Colors not updating after changing settings
+
+Changes to `semantic_tokens` mode may require a language server restart. Use `lsp: restart language servers` from the command palette.
+
+### Theme styles not being applied
+
+Ensure the style names in your rules match styles defined in your theme. The `style` array provides fallback optionsβif the first style isn't found, Zed tries the next one.