1use std::{collections::hash_map, sync::Arc, time::Duration};
2
3use collections::{HashMap, HashSet};
4use futures::future::join_all;
5use gpui::{
6 App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
7};
8use itertools::Itertools;
9use language::language_settings::LanguageSettings;
10use project::{
11 lsp_store::{
12 BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
13 TokenType,
14 },
15 project_settings::ProjectSettings,
16};
17use settings::{
18 SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
19 SemanticTokenRules, Settings as _,
20};
21use text::BufferId;
22use theme::SyntaxTheme;
23use ui::ActiveTheme as _;
24
25use crate::{
26 Editor,
27 actions::ToggleSemanticHighlights,
28 display_map::{HighlightStyleInterner, SemanticTokenHighlight},
29};
30
/// Per-editor state backing LSP semantic-token highlighting.
pub(super) struct SemanticTokenState {
    // Styling rules snapshot from the global LSP project settings; compared
    // against incoming settings in `update_rules` to detect changes.
    rules: SemanticTokenRules,
    // Whether semantic highlighting is currently enabled for this editor.
    enabled: bool,
    // The in-flight (debounced) task that fetches tokens and applies
    // highlights; replaced wholesale on every refresh.
    update_task: Task<()>,
    // Buffer version for which tokens were last fetched, per buffer; used to
    // skip re-querying a buffer that has not changed since the last fetch.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
37
38impl SemanticTokenState {
39 pub(super) fn new(cx: &App, enabled: bool) -> Self {
40 Self {
41 rules: ProjectSettings::get_global(cx)
42 .global_lsp_settings
43 .semantic_token_rules
44 .clone(),
45 enabled,
46 update_task: Task::ready(()),
47 fetched_for_buffers: HashMap::default(),
48 }
49 }
50
51 pub(super) fn enabled(&self) -> bool {
52 self.enabled
53 }
54
55 pub(super) fn toggle_enabled(&mut self) {
56 self.enabled = !self.enabled;
57 }
58
59 #[cfg(test)]
60 pub(super) fn take_update_task(&mut self) -> Task<()> {
61 std::mem::replace(&mut self.update_task, Task::ready(()))
62 }
63
64 pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
65 self.fetched_for_buffers.remove(buffer_id);
66 }
67
68 pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
69 if new_rules != self.rules {
70 self.rules = new_rules;
71 true
72 } else {
73 false
74 }
75 }
76}
77
impl Editor {
    /// Returns `true` when any buffer shown in this editor has a semantics
    /// provider that supports semantic tokens.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        // OR the capability across all buffers in the (possibly multi-)buffer.
        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(&mut |buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlighting is currently enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: toggles semantic highlighting on/off, dropping all
    /// cached fetch state and triggering a fresh refresh.
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.invalidate_semantic_tokens(None);
        self.refresh_semantic_tokens(None, None, cx);
    }

    /// Drops the cached "already fetched" version for one buffer, or for all
    /// buffers when `for_buffer` is `None`, so the next refresh re-queries.
    pub(super) fn invalidate_semantic_tokens(&mut self, for_buffer: Option<BufferId>) {
        match for_buffer {
            Some(for_buffer) => self.semantic_token_state.invalidate_buffer(&for_buffer),
            None => self.semantic_token_state.fetched_for_buffers.clear(),
        }
    }

    /// Debounced refresh of semantic-token highlights.
    ///
    /// When highlighting is disabled (or LSP data is off) this clears all
    /// existing highlights and cancels any in-flight update. Otherwise it
    /// collects the buffers to query (visible LSP-relevant buffers, plus the
    /// explicitly requested `buffer_id`), prunes highlights for buffers whose
    /// per-language setting turned tokens off, and spawns a task that fetches
    /// tokens after a 50ms debounce and installs the resulting highlights in
    /// the display map.
    ///
    /// `for_server` (when `Some`) means a specific server asked for a
    /// refresh; in that case all previously fetched versions are drained so
    /// every buffer's highlights are invalidated and re-fetched.
    pub(super) fn refresh_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        if !self.lsp_data_enabled() || !self.semantic_token_state.enabled() {
            self.invalidate_semantic_tokens(None);
            self.display_map.update(cx, |display_map, _| {
                // Clear in place when we hold the only Arc reference;
                // otherwise swap in a fresh empty map (readers keep the old
                // snapshot until they drop it).
                match Arc::get_mut(&mut display_map.semantic_token_highlights) {
                    Some(highlights) => highlights.clear(),
                    None => display_map.semantic_token_highlights = Arc::new(Default::default()),
                };
            });
            // Cancel any in-flight update by replacing its task.
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // Server-initiated refresh: forget every fetched version now and
        // remember the affected buffers, so their stale highlights can be
        // invalidated once the new results arrive.
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Buffers to query: visible, LSP-relevant buffers plus the explicitly
        // requested one — restricted to buffers registered with the editor
        // whose language settings enable semantic tokens.
        let buffers_to_query = self
            .visible_buffers(cx)
            .into_iter()
            .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && LanguageSettings::for_buffer(editor_buffer.read(cx), cx)
                        .semantic_tokens
                        .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Drop highlights for buffers that still have them but are no longer
        // queried AND have semantic tokens disabled in their settings.
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .keys()
            .copied()
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        LanguageSettings::for_buffer(&buffer, cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        // Replacing `update_task` drops (and thereby cancels) any previous
        // pending refresh, which is what debounces rapid calls.
        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            // 50ms debounce before issuing requests.
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            // Skip buffers unchanged since the last fetch.
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                sema.semantic_tokens(buffer, for_server, cx).map(
                                    |task| async move { (buffer_id, query_version, task.await) },
                                )
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            // Await all per-buffer fetches concurrently.
            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // Server-initiated refresh: clear the stale highlights
                    // collected earlier before applying the new results.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                            editor.semantic_token_state.invalidate_buffer(&buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // `tokens: None` means the server reported no
                            // tokens; drop any existing highlights.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                    {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Record the fetched version; skip results that are
                        // not newer than what we already applied (a later
                        // fetch may have raced ahead of this one).
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        // Language name is used to pick language-specific
                        // token styling rules for the server.
                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                // Merge highlights from every server that
                                // produced tokens for this buffer.
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    // NOTE(review): reserves 2x the token
                                    // count even though at most one highlight
                                    // is produced per token — presumably a
                                    // deliberate over-allocation; confirm.
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        cx,
                                    ));
                                }

                                // Highlights must be sorted by start anchor
                                // before being stored.
                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                Arc::make_mut(&mut display_map.semantic_token_highlights).insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }
}
325
/// Converts a server's buffer-local semantic tokens into multi-buffer
/// highlight entries.
///
/// All token range endpoints are translated to multi-buffer anchors in a
/// single batch (two anchors per token, flattened via `flat_map`), then
/// re-paired with `tuples` and re-aligned with their source tokens via `zip`.
/// The zip happens before the `filter_map`, so the pairing stays index-exact;
/// tokens with either endpoint outside any visible excerpt (`None` anchor)
/// are dropped. Styles are resolved through `convert_token` and deduplicated
/// through the interner.
fn buffer_into_editor_highlights<'a, 'b>(
    buffer_tokens: &'a [BufferSemanticToken],
    stylizer: &'a SemanticTokenStylizer,
    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
    interner: &'b mut HighlightStyleInterner,
    cx: &'a App,
) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
    multi_buffer_snapshot
        .text_anchors_to_visible_anchors(
            buffer_tokens
                .iter()
                .flat_map(|token| [token.range.start, token.range.end]),
        )
        .into_iter()
        .tuples::<(_, _)>()
        .zip(buffer_tokens)
        .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
            // Skip tokens not visible in the multi-buffer, and tokens whose
            // type/modifiers resolve to no style.
            let range = multi_buffer_start?..multi_buffer_end?;
            let style = convert_token(
                stylizer,
                cx.theme().syntax(),
                token.token_type,
                token.token_modifiers,
            )?;
            let style = interner.intern(style);
            Some(SemanticTokenHighlight {
                range,
                style,
                token_type: token.token_type,
                token_modifiers: token.token_modifiers,
                server_id: stylizer.server_id(),
            })
        })
}
360
/// Resolves a single semantic token (type + modifier bitset) to a
/// `HighlightStyle` using the server's stylizer rules and the syntax theme.
///
/// Returns `None` when no rule exists for the token type, when the
/// highest-priority (last) matching rule defines no style at all (an explicit
/// "unstyle" override), or when no rule matched.
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    let rules = stylizer.rules_for_token(token_type)?;
    // A rule matches when every modifier it requires is set on the token;
    // a rule with no required modifiers always matches.
    let matching: Vec<_> = rules
        .iter()
        .filter(|rule| {
            rule.token_modifiers
                .iter()
                .all(|m| stylizer.has_modifier(modifiers, m))
        })
        .collect();

    // Only the *last* matching rule can suppress styling entirely — later
    // rules take precedence over earlier ones.
    if let Some(rule) = matching.last() {
        if rule.no_style_defined() {
            return None;
        }
    }

    let mut highlight = HighlightStyle::default();
    let mut empty = true;

    // Apply matching rules in order; each rule overwrites fields set by
    // earlier ones.
    for rule in matching {
        empty = false;

        // Named theme style referenced by the rule, used as a fallback for
        // fields the rule does not set directly.
        let style = rule
            .style
            .iter()
            .find_map(|style| theme.style_for_name(style));

        // Field precedence: the rule's own value, then the named theme
        // style's value, then whatever earlier rules accumulated.
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        // `InheritForeground` reads `highlight.color` as accumulated so far —
        // foreground is applied above, so this rule's own color (if any) is
        // already in effect.
        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }

    // No matching rules at all -> no highlight.
    if empty { None } else { Some(highlight) }
}
466
467#[cfg(test)]
468mod tests {
469 use std::{
470 ops::Range,
471 sync::atomic::{self, AtomicUsize},
472 };
473
474 use futures::StreamExt as _;
475 use gpui::{
476 AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, UpdateGlobal as _,
477 };
478 use language::{
479 Diagnostic, DiagnosticEntry, DiagnosticSet, Language, LanguageAwareStyling, LanguageConfig,
480 LanguageMatcher,
481 };
482 use languages::FakeLspAdapter;
483 use lsp::LanguageServerId;
484 use multi_buffer::{
485 AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
486 };
487 use project::Project;
488 use rope::{Point, PointUtf16};
489 use serde_json::json;
490 use settings::{
491 GlobalLspSettingsContent, LanguageSettingsContent, SemanticTokenRule, SemanticTokenRules,
492 SemanticTokens, SettingsStore,
493 };
494 use workspace::{MultiWorkspace, WorkspaceHandle as _};
495
496 use crate::{
497 Capability,
498 editor_tests::{init_test, update_test_language_settings},
499 test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
500 };
501
502 use super::*;
503
    /// A server that advertises full tokens but no delta support must be
    /// queried with a full `SemanticTokensFullRequest` for every edit, even
    /// if it returns a `result_id`.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // `delta: None` — the server does NOT support delta requests.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Counts how many full requests the fake server receives.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // Second edit — must trigger another FULL request, not a delta.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // The single token ("main", length 4 at offset 3) is highlighted.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
581
    /// A server that supports deltas but returns `result_id: None` forces
    /// the client to keep issuing full requests (no previous result to delta
    /// against).
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // `delta: Some(true)` — the server DOES advertise delta support.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Await the debounced update task so highlights are applied.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Second edit — still a FULL request, because no result ID was stored.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
657
    /// When the server supports deltas and returned a `result_id`, the
    /// second fetch must go through `SemanticTokensFullDeltaRequest` instead
    /// of a second full request.
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Server advertises delta support.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Separate counters for full vs. delta requests.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        // The delta handler verifies the client echoes the previous result ID
        // and returns an empty edit list (tokens unchanged).
        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Second edit — served as a delta against result "a".
        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Exactly one full request and one delta request.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
753
    /// Two language servers for the same language each contribute tokens;
    /// the editor must query both (once each) and merge both servers'
    /// highlights into a single sorted set.
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for TOML buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test.
        // Each uses a different (single-entry) token legend.
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        // Per-server full-request counters.
        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        // Server 1: full tokens only (no delta support).
        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        // Server 2: same shape, but highlights a different range.
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight '3' as a literal
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        // Focus the editor so it behaves like a real, active pane item.
        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait for both fake servers to start.
        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        // Advance past the 50ms refresh debounce.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // One highlight from each server, merged and sorted by position.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        // Each server was queried exactly once.
        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
973
974 #[gpui::test]
975 async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
976 init_test(cx, |_| {});
977
978 update_test_language_settings(cx, &|language_settings| {
979 language_settings.languages.0.insert(
980 "TOML".into(),
981 LanguageSettingsContent {
982 semantic_tokens: Some(SemanticTokens::Full),
983 ..LanguageSettingsContent::default()
984 },
985 );
986 language_settings.languages.0.insert(
987 "Rust".into(),
988 LanguageSettingsContent {
989 semantic_tokens: Some(SemanticTokens::Full),
990 ..LanguageSettingsContent::default()
991 },
992 );
993 });
994
995 let toml_language = Arc::new(Language::new(
996 LanguageConfig {
997 name: "TOML".into(),
998 matcher: LanguageMatcher {
999 path_suffixes: vec!["toml".into()],
1000 ..LanguageMatcher::default()
1001 },
1002 ..LanguageConfig::default()
1003 },
1004 None,
1005 ));
1006 let rust_language = Arc::new(Language::new(
1007 LanguageConfig {
1008 name: "Rust".into(),
1009 matcher: LanguageMatcher {
1010 path_suffixes: vec!["rs".into()],
1011 ..LanguageMatcher::default()
1012 },
1013 ..LanguageConfig::default()
1014 },
1015 None,
1016 ));
1017
1018 let toml_legend = lsp::SemanticTokensLegend {
1019 token_types: vec!["property".into()],
1020 token_modifiers: Vec::new(),
1021 };
1022 let rust_legend = lsp::SemanticTokensLegend {
1023 token_types: vec!["constant".into()],
1024 token_modifiers: Vec::new(),
1025 };
1026
1027 let app_state = cx.update(workspace::AppState::test);
1028
1029 cx.update(|cx| {
1030 assets::Assets.load_test_fonts(cx);
1031 crate::init(cx);
1032 workspace::init(app_state.clone(), cx);
1033 });
1034
1035 let project = Project::test(app_state.fs.clone(), [], cx).await;
1036 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1037 let full_counter_toml = Arc::new(AtomicUsize::new(0));
1038 let full_counter_toml_clone = full_counter_toml.clone();
1039
1040 let mut toml_server = language_registry.register_fake_lsp(
1041 toml_language.name(),
1042 FakeLspAdapter {
1043 name: "toml",
1044 capabilities: lsp::ServerCapabilities {
1045 semantic_tokens_provider: Some(
1046 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1047 lsp::SemanticTokensOptions {
1048 legend: toml_legend,
1049 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1050 ..lsp::SemanticTokensOptions::default()
1051 },
1052 ),
1053 ),
1054 ..lsp::ServerCapabilities::default()
1055 },
1056 initializer: Some(Box::new({
1057 let full_counter_toml_clone = full_counter_toml_clone.clone();
1058 move |fake_server| {
1059 let full_counter = full_counter_toml_clone.clone();
1060 fake_server
1061 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1062 move |_, _| {
1063 full_counter.fetch_add(1, atomic::Ordering::Release);
1064 async move {
1065 Ok(Some(lsp::SemanticTokensResult::Tokens(
1066 lsp::SemanticTokens {
1067 // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
1068 data: vec![
1069 0, // delta_line (line 0)
1070 0, // delta_start
1071 1, // length
1072 0, // token_type
1073 0, // token_modifiers_bitset
1074 1, // delta_line (line 1)
1075 0, // delta_start
1076 1, // length
1077 0, // token_type
1078 0, // token_modifiers_bitset
1079 1, // delta_line (line 2)
1080 0, // delta_start
1081 1, // length
1082 0, // token_type
1083 0, // token_modifiers_bitset
1084 ],
1085 result_id: Some("a".into()),
1086 },
1087 )))
1088 }
1089 },
1090 );
1091 }
1092 })),
1093 ..FakeLspAdapter::default()
1094 },
1095 );
1096 language_registry.add(toml_language.clone());
1097 let mut rust_server = language_registry.register_fake_lsp(
1098 rust_language.name(),
1099 FakeLspAdapter {
1100 name: "rust",
1101 capabilities: lsp::ServerCapabilities {
1102 semantic_tokens_provider: Some(
1103 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1104 lsp::SemanticTokensOptions {
1105 legend: rust_legend,
1106 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1107 ..lsp::SemanticTokensOptions::default()
1108 },
1109 ),
1110 ),
1111 ..lsp::ServerCapabilities::default()
1112 },
1113 ..FakeLspAdapter::default()
1114 },
1115 );
1116 language_registry.add(rust_language.clone());
1117
1118 app_state
1119 .fs
1120 .as_fake()
1121 .insert_tree(
1122 EditorLspTestContext::root_path(),
1123 json!({
1124 ".git": {},
1125 "dir": {
1126 "foo.toml": "a = 1\nb = 2\nc = 3\n",
1127 "bar.rs": "const c: usize = 3;\n",
1128 }
1129 }),
1130 )
1131 .await;
1132
1133 let (multi_workspace, cx) =
1134 cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
1135 let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
1136 project
1137 .update(cx, |project, cx| {
1138 project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
1139 })
1140 .await
1141 .unwrap();
1142 cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
1143 .await;
1144
1145 let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
1146 let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
1147 let (toml_item, rust_item) = workspace.update_in(cx, |workspace, window, cx| {
1148 (
1149 workspace.open_path(toml_file, None, true, window, cx),
1150 workspace.open_path(rust_file, None, true, window, cx),
1151 )
1152 });
1153 let toml_item = toml_item.await.expect("Could not open test file");
1154 let rust_item = rust_item.await.expect("Could not open test file");
1155
1156 let (toml_editor, rust_editor) = cx.update(|_, cx| {
1157 (
1158 toml_item
1159 .act_as::<Editor>(cx)
1160 .expect("Opened test file wasn't an editor"),
1161 rust_item
1162 .act_as::<Editor>(cx)
1163 .expect("Opened test file wasn't an editor"),
1164 )
1165 });
1166 let toml_buffer = cx.read(|cx| {
1167 toml_editor
1168 .read(cx)
1169 .buffer()
1170 .read(cx)
1171 .as_singleton()
1172 .unwrap()
1173 });
1174 let rust_buffer = cx.read(|cx| {
1175 rust_editor
1176 .read(cx)
1177 .buffer()
1178 .read(cx)
1179 .as_singleton()
1180 .unwrap()
1181 });
1182 let multibuffer = cx.new(|cx| {
1183 let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
1184 multibuffer.set_excerpts_for_path(
1185 PathKey::sorted(0),
1186 toml_buffer.clone(),
1187 [Point::new(0, 0)..Point::new(0, 4)],
1188 0,
1189 cx,
1190 );
1191 multibuffer.set_excerpts_for_path(
1192 PathKey::sorted(1),
1193 rust_buffer.clone(),
1194 [Point::new(0, 0)..Point::new(0, 4)],
1195 0,
1196 cx,
1197 );
1198 multibuffer
1199 });
1200
1201 let editor = workspace.update_in(cx, |workspace, window, cx| {
1202 let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
1203 workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
1204 editor
1205 });
1206 editor.update_in(cx, |editor, window, cx| {
1207 let nav_history = workspace
1208 .read(cx)
1209 .active_pane()
1210 .read(cx)
1211 .nav_history_for_item(&cx.entity());
1212 editor.set_nav_history(Some(nav_history));
1213 window.focus(&editor.focus_handle(cx), cx)
1214 });
1215
1216 let _toml_server = toml_server.next().await.unwrap();
1217 let _rust_server = rust_server.next().await.unwrap();
1218
1219 // Initial request.
1220 cx.executor().advance_clock(Duration::from_millis(200));
1221 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1222 cx.run_until_parked();
1223 task.await;
1224 assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
1225 cx.run_until_parked();
1226
1227 // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
1228 // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
1229 assert_eq!(
1230 extract_semantic_highlights(&editor, &cx),
1231 vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
1232 );
1233
1234 // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
1235 let toml_anchor = editor.read_with(cx, |editor, cx| {
1236 editor
1237 .buffer()
1238 .read(cx)
1239 .snapshot(cx)
1240 .anchor_in_excerpt(text::Anchor::min_for_buffer(
1241 toml_buffer.read(cx).remote_id(),
1242 ))
1243 .unwrap()
1244 });
1245 editor.update_in(cx, |editor, _, cx| {
1246 editor.buffer().update(cx, |buffer, cx| {
1247 buffer.expand_excerpts([toml_anchor], 2, ExpandExcerptDirection::Down, cx);
1248 });
1249 });
1250
1251 // Wait for semantic tokens to be re-fetched after expansion.
1252 cx.executor().advance_clock(Duration::from_millis(200));
1253 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1254 cx.run_until_parked();
1255 task.await;
1256
1257 // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
1258 // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
1259 // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
1260 assert_eq!(
1261 extract_semantic_highlights(&editor, &cx),
1262 vec![
1263 MultiBufferOffset(0)..MultiBufferOffset(1),
1264 MultiBufferOffset(6)..MultiBufferOffset(7),
1265 MultiBufferOffset(12)..MultiBufferOffset(13),
1266 ]
1267 );
1268 }
1269
1270 fn extract_semantic_highlights(
1271 editor: &Entity<Editor>,
1272 cx: &TestAppContext,
1273 ) -> Vec<Range<MultiBufferOffset>> {
1274 editor.read_with(cx, |editor, cx| {
1275 let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1276 editor
1277 .display_map
1278 .read(cx)
1279 .semantic_token_highlights
1280 .iter()
1281 .flat_map(|(_, (v, _))| v.iter())
1282 .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1283 .collect()
1284 })
1285 }
1286
    /// Verifies that editing `semantic_token_rules` in the user settings restyles
    /// semantic tokens: a "function" token starts with the theme's default styling
    /// and picks up a custom red foreground after the settings change plus a refetch.
    #[gpui::test]
    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});

        // Enable full semantic token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertises a single "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Every full-tokens request returns one "function" token covering
        // columns 3..7 — the identifier "main" in `fn main() {}`.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type (function)
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None,
                            },
                        )))
                    }
                },
            );

        // Trigger initial semantic tokens fetch
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Verify initial highlights exist (with no custom color yet)
        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
        assert_eq!(
            initial_ranges,
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Should have initial semantic token highlights"
        );
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Initial color should be None or theme default (not red or blue)
        let initial_color = initial_styles[0].color;

        // Set a custom foreground color for function tokens via settings.json
        let red_color = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        };
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: Vec::from([SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                foreground_color: Some(red_color),
                                ..SemanticTokenRule::default()
                            }]),
                        }),
                        ..GlobalLspSettingsContent::default()
                    });
                });
            });
        });

        // Trigger a refetch by making an edit (which forces semantic tokens update)
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        // Verify the highlights now have the custom red color
        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(
            styles_after_settings_change.len(),
            1,
            "Should still have one highlight"
        );
        assert_eq!(
            styles_after_settings_change[0].color,
            Some(Hsla::from(red_color)),
            "Highlight should have the custom red color from settings.json"
        );
        assert_ne!(
            styles_after_settings_change[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1407
    /// Verifies that `experimental_theme_overrides` restyles semantic tokens:
    /// applying an override recolors the "function" highlight, changing the
    /// override recolors it again, and clearing it reverts to the theme default.
    #[gpui::test]
    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme_settings::{HighlightStyleContent, ThemeStyleContent};

        init_test(cx, |_| {});

        // Enable full semantic token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One "function" token over columns 3..7 ("main") on every request.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Capture the theme-default color to compare against after overriding.
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Changing experimental_theme_overrides triggers GlobalTheme reload,
        // which fires theme_changed → refresh_semantic_token_highlights.
        let red_color: Hsla = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#ff0000".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        // No edit is made here: the restyle must happen from the theme change alone.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(red_color),
            "Highlight should have red color from theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );

        // Changing the override to a different color also restyles.
        let blue_color: Hsla = Rgba {
            r: 0.0,
            g: 0.0,
            b: 1.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#0000ff".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_second_override.len(), 1);
        assert_eq!(
            styles_after_second_override[0].color,
            Some(blue_color),
            "Highlight should have blue color from updated theme override"
        );

        // Removing overrides reverts to the original theme color.
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = None;
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_clear.len(), 1);
        assert_eq!(
            styles_after_clear[0].color, initial_color,
            "Highlight should revert to initial color after clearing overrides"
        );
    }
1572
    /// Verifies that per-theme overrides (`theme_overrides` keyed by the active
    /// theme's name) restyle semantic tokens, just like the experimental global
    /// overrides do.
    #[gpui::test]
    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme_settings::{HighlightStyleContent, ThemeStyleContent};
        use ui::ActiveTheme as _;

        init_test(cx, |_| {});

        // Enable full semantic token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One "function" token over columns 3..7 ("main") on every request.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Capture the theme-default color to compare against after overriding.
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Per-theme overrides (theme_overrides keyed by theme name) also go through
        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
        let green_color: Hsla = Rgba {
            r: 0.0,
            g: 1.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
                        theme_name.clone(),
                        ThemeStyleContent {
                            syntax: IndexMap::from_iter([(
                                "function".to_string(),
                                HighlightStyleContent {
                                    color: Some("#00ff00".to_string()),
                                    background_color: None,
                                    font_style: None,
                                    font_weight: None,
                                },
                            )]),
                            ..ThemeStyleContent::default()
                        },
                    )]);
                });
            });
        });

        // No edit is made here: the restyle must happen from the theme change alone.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(green_color),
            "Highlight should have green color from per-theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1685
    /// Verifies that stopping the language server for a buffer removes that
    /// buffer's semantic token highlights instead of leaving them stale.
    #[gpui::test]
    async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One "function" token over columns 3..7 ("main") on every request.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before stopping the server"
        );

        // Stop every language server attached to the editor's buffers.
        cx.update_editor(|editor, _, cx| {
            let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
            editor.project.as_ref().unwrap().update(cx, |project, cx| {
                project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
            })
        });
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after stopping the server"
        );
    }
1763
    /// Verifies that switching the language's `semantic_tokens` setting from
    /// `Full` to `Off` clears existing highlights without requiring an edit.
    #[gpui::test]
    async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Start with semantic tokens enabled for Rust.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One "function" token over columns 3..7 ("main") on every request.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before disabling the setting"
        );

        // Turn the feature off; no buffer edit follows, so the clearing must
        // come from the settings change itself.
        update_test_language_settings(&mut cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Off),
                    ..LanguageSettingsContent::default()
                },
            );
        });
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after disabling the setting"
        );
    }
1844
    /// Verifies that a semantic token rule with a matching `token_type` but no
    /// style fields acts as a disable switch: the matched token type stops being
    /// highlighted entirely.
    #[gpui::test]
    async fn test_semantic_token_disabling_with_empty_rule(cx: &mut TestAppContext) {
        init_test(cx, |_| {});
        update_test_language_settings(cx, &|s| {
            s.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..Default::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: vec![],
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..Default::default()
                        },
                    ),
                ),
                ..Default::default()
            },
            cx,
        )
        .await;

        // One "function" token at line 0, columns 3..7 (delta-encoded).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![0, 3, 4, 0, 0],
                            result_id: None,
                        },
                    )))
                },
            );

        // Verify it highlights by default
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();
        assert_eq!(extract_semantic_highlights(&cx.editor, &cx).len(), 1);

        // Apply EMPTY rule to disable it
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: vec![SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                ..Default::default()
                            }],
                        }),
                        ..Default::default()
                    });
                });
            });
        });

        // Edit to force a refetch under the new rules.
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        assert!(
            extract_semantic_highlights(&cx.editor, &cx).is_empty(),
            "Highlighting should be disabled by empty style setting"
        );
    }
1922
    /// Verifies rule matching breadth: an empty rule for "comment" with no
    /// modifiers also disables the more specific "comment" + `documentation`
    /// modifier combination.
    #[gpui::test]
    async fn test_semantic_token_broad_rule_disables_specific_token(cx: &mut TestAppContext) {
        init_test(cx, |_| {});
        update_test_language_settings(cx, &|s| {
            s.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..Default::default()
                },
            );
        });

        // Legend has one token type ("comment") and one modifier ("documentation").
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["comment".into()],
                                token_modifiers: vec!["documentation".into()],
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..Default::default()
                        },
                    ),
                ),
                ..Default::default()
            },
            cx,
        )
        .await;

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![0, 0, 5, 0, 1], // comment [documentation]
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇ/// d\n");
        full_request.next().await;
        cx.run_until_parked();
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx).len(),
            1,
            "Documentation comment should be highlighted"
        );

        // Apply a BROAD empty rule for "comment" (no modifiers)
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: vec![SemanticTokenRule {
                                token_type: Some("comment".to_string()),
                                ..Default::default()
                            }],
                        }),
                        ..Default::default()
                    });
                });
            });
        });

        // Edit to force a refetch under the new rules.
        cx.set_state("ˇ/// d\n");
        full_request.next().await;
        cx.run_until_parked();

        assert!(
            extract_semantic_highlights(&cx.editor, &cx).is_empty(),
            "Broad empty rule should disable specific documentation comment"
        );
    }
2003
    /// Verifies the inverse of the broad-rule test: an empty rule scoped to
    /// "comment" + the `documentation` modifier disables only documentation
    /// comments, while plain comments stay highlighted.
    #[gpui::test]
    async fn test_semantic_token_specific_rule_does_not_disable_broad_token(
        cx: &mut TestAppContext,
    ) {
        use gpui::UpdateGlobal as _;
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});
        update_test_language_settings(cx, &|s| {
            s.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..Default::default()
                },
            );
        });

        // Legend has one token type ("comment") and one modifier ("documentation").
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["comment".into()],
                                token_modifiers: vec!["documentation".into()],
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..Default::default()
                        },
                    ),
                ),
                ..Default::default()
            },
            cx,
        )
        .await;

        // Two comment tokens, one per line: the first carries the
        // "documentation" modifier, the second has no modifiers.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, 0, 5, 0, 1, // comment [documentation]
                                1, 0, 5, 0, 0, // normal comment
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇ/// d\n// n\n");
        full_request.next().await;
        cx.run_until_parked();
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx).len(),
            2,
            "Both documentation and normal comments should be highlighted initially"
        );

        // Apply a SPECIFIC empty rule for documentation only
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: vec![SemanticTokenRule {
                                token_type: Some("comment".to_string()),
                                token_modifiers: vec!["documentation".to_string()],
                                ..Default::default()
                            }],
                        }),
                        ..Default::default()
                    });
                });
            });
        });

        // Edit to force a refetch under the new rules.
        cx.set_state("ˇ/// d\n// n\n");
        full_request.next().await;
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx).len(),
            1,
            "Normal comment should still be highlighted (matched by default rule)"
        );
    }
2094
    /// Verifies that semantic token highlighting and diagnostic styling compose:
    /// a chunk covered by both a semantic token and an ERROR diagnostic carries
    /// the diagnostic severity AND a highlight style at the same time.
    #[gpui::test]
    async fn test_diagnostics_visible_when_semantic_token_set_to_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One "function" token over columns 3..7 ("main") on every request.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Wait for the pending semantic token update to finish before adding
        // the diagnostic.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Inject an ERROR diagnostic over the same range as the semantic token.
        cx.update_buffer(|buffer, cx| {
            buffer.update_diagnostics(
                LanguageServerId(0),
                DiagnosticSet::new(
                    [DiagnosticEntry {
                        range: PointUtf16::new(0, 3)..PointUtf16::new(0, 7),
                        diagnostic: Diagnostic {
                            severity: lsp::DiagnosticSeverity::ERROR,
                            group_id: 1,
                            message: "unused function".into(),
                            ..Default::default()
                        },
                    }],
                    buffer,
                ),
                cx,
            )
        });

        cx.run_until_parked();
        // Render the first display row with diagnostics on but tree-sitter
        // syntax highlighting off, so any highlight_style seen on a chunk must
        // come from semantic tokens.
        let chunks = cx.update_editor(|editor, window, cx| {
            editor
                .snapshot(window, cx)
                .display_snapshot
                .chunks(
                    crate::display_map::DisplayRow(0)..crate::display_map::DisplayRow(1),
                    LanguageAwareStyling {
                        tree_sitter: false,
                        diagnostics: true,
                    },
                    crate::HighlightStyles::default(),
                )
                .map(|chunk| {
                    (
                        chunk.text.to_string(),
                        chunk.diagnostic_severity,
                        chunk.highlight_style,
                    )
                })
                .collect::<Vec<_>>()
        });

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert!(
            chunks.iter().any(
                |(text, severity, style): &(
                    String,
                    Option<lsp::DiagnosticSeverity>,
                    Option<gpui::HighlightStyle>
                )| {
                    text == "main"
                        && *severity == Some(lsp::DiagnosticSeverity::ERROR)
                        && style.is_some()
                }
            ),
            "expected 'main' chunk to have both diagnostic and semantic styling: {:?}",
            chunks
        );
    }
2218
2219 fn extract_semantic_highlight_styles(
2220 editor: &Entity<Editor>,
2221 cx: &TestAppContext,
2222 ) -> Vec<HighlightStyle> {
2223 editor.read_with(cx, |editor, cx| {
2224 editor
2225 .display_map
2226 .read(cx)
2227 .semantic_token_highlights
2228 .iter()
2229 .flat_map(|(_, (v, interner))| {
2230 v.iter().map(|highlights| interner[highlights.style])
2231 })
2232 .collect()
2233 })
2234 }
2235}