1use std::{collections::hash_map, sync::Arc, time::Duration};
2
3use collections::{HashMap, HashSet};
4use futures::future::join_all;
5use gpui::{
6 App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
7};
8use itertools::Itertools;
9use language::language_settings::LanguageSettings;
10use project::{
11 lsp_store::{
12 BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
13 TokenType,
14 },
15 project_settings::ProjectSettings,
16};
17use settings::{
18 SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
19 SemanticTokenRules, Settings as _,
20};
21use text::BufferId;
22use theme::SyntaxTheme;
23use ui::ActiveTheme as _;
24
25use crate::{
26 Editor,
27 actions::ToggleSemanticHighlights,
28 display_map::{HighlightStyleInterner, SemanticTokenHighlight},
29};
30
/// Per-editor state for LSP semantic token highlighting.
pub(super) struct SemanticTokenState {
    // Styling rules taken from the global LSP settings; compared against
    // incoming settings to decide whether highlights must be recomputed.
    rules: SemanticTokenRules,
    // Whether semantic highlighting is currently enabled for this editor.
    enabled: bool,
    // The in-flight (debounced) refresh task; replaced on every refresh.
    update_task: Task<()>,
    // The buffer version at which tokens were last fetched, per buffer;
    // used to skip redundant requests when a buffer has not changed.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
37
38impl SemanticTokenState {
39 pub(super) fn new(cx: &App, enabled: bool) -> Self {
40 Self {
41 rules: ProjectSettings::get_global(cx)
42 .global_lsp_settings
43 .semantic_token_rules
44 .clone(),
45 enabled,
46 update_task: Task::ready(()),
47 fetched_for_buffers: HashMap::default(),
48 }
49 }
50
51 pub(super) fn enabled(&self) -> bool {
52 self.enabled
53 }
54
55 pub(super) fn toggle_enabled(&mut self) {
56 self.enabled = !self.enabled;
57 }
58
59 #[cfg(test)]
60 pub(super) fn take_update_task(&mut self) -> Task<()> {
61 std::mem::replace(&mut self.update_task, Task::ready(()))
62 }
63
64 pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
65 self.fetched_for_buffers.remove(buffer_id);
66 }
67
68 pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
69 if new_rules != self.rules {
70 self.rules = new_rules;
71 true
72 } else {
73 false
74 }
75 }
76}
77
impl Editor {
    /// Returns `true` if any buffer shown in this editor has a semantics
    /// provider capable of supplying semantic tokens.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        // One supporting buffer is enough.
        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(&mut |buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlighting is enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: toggles semantic highlighting, dropping all cached
    /// token state and kicking off a fresh fetch.
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.invalidate_semantic_tokens(None);
        self.refresh_semantic_tokens(None, None, cx);
    }

    /// Drops the fetched-version record for one buffer, or for all buffers
    /// when `for_buffer` is `None`, so the next refresh refetches tokens.
    pub(super) fn invalidate_semantic_tokens(&mut self, for_buffer: Option<BufferId>) {
        match for_buffer {
            Some(for_buffer) => self.semantic_token_state.invalidate_buffer(&for_buffer),
            None => self.semantic_token_state.fetched_for_buffers.clear(),
        }
    }

    /// Refetches semantic tokens for the visible excerpts (plus `buffer_id`,
    /// if given), debounced by 50ms, and rebuilds the display map's
    /// highlight entries from the responses.
    ///
    /// `for_server` narrows the refresh to one language server's refresh
    /// request; when set, all previously fetched versions are dropped so
    /// every buffer is re-queried.
    pub(super) fn refresh_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        // Disabled: clear all highlight state and bail.
        if !self.lsp_data_enabled() || !self.semantic_token_state.enabled() {
            self.invalidate_semantic_tokens(None);
            self.display_map.update(cx, |display_map, _| {
                // Clear in place when we hold the only Arc reference;
                // otherwise swap in a fresh empty map.
                match Arc::get_mut(&mut display_map.semantic_token_highlights) {
                    Some(highlights) => highlights.clear(),
                    None => display_map.semantic_token_highlights = Arc::new(Default::default()),
                };
            });
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-initiated refresh invalidates everything previously
        // fetched; remember which buffers to purge after the new fetch.
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Candidate buffers: currently visible excerpts plus the explicitly
        // requested buffer, restricted to registered buffers whose language
        // settings enable semantic tokens.
        let buffers_to_query = self
            .visible_excerpts(true, cx)
            .into_values()
            .map(|(buffer, ..)| buffer)
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && LanguageSettings::for_buffer(editor_buffer.read(cx), cx)
                        .semantic_tokens
                        .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Purge highlights for buffers that still have entries but whose
        // settings have turned semantic tokens off (and that we are not
        // about to re-query anyway).
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .keys()
            .copied()
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        LanguageSettings::for_buffer(&buffer, cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            // Debounce: coalesce rapid edits into a single round of
            // requests.
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            // Skip buffers unchanged since the last fetch.
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                sema.semantic_tokens(buffer, for_server, cx).map(
                                    |task| async move { (buffer_id, query_version, task.await) },
                                )
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            // Await all per-buffer requests concurrently.
            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // Apply the deferred server-refresh invalidation before
                    // inserting the freshly fetched results below.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                            editor.semantic_token_state.invalidate_buffer(&buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // An empty response clears the buffer's
                            // highlights.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            // A failed fetch keeps whatever highlights we
                            // already had.
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                    {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Record the fetched version; drop stale responses
                        // that are not newer than what we already applied.
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                // Merge tokens from every responding server
                                // into one sorted highlight list.
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        cx,
                                    ));
                                }

                                // Highlights must be ordered by start anchor
                                // for the display map's lookups.
                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                Arc::make_mut(&mut display_map.semantic_token_highlights).insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }
}
325
/// Converts buffer-local semantic tokens into multi-buffer
/// [`SemanticTokenHighlight`]s, resolving each token's style through
/// `stylizer` and interning the resulting styles.
///
/// Tokens whose start or end anchor does not map into a visible excerpt,
/// or that resolve to no style, are dropped.
fn buffer_into_editor_highlights<'a, 'b>(
    buffer_tokens: &'a [BufferSemanticToken],
    stylizer: &'a SemanticTokenStylizer,
    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
    interner: &'b mut HighlightStyleInterner,
    cx: &'a App,
) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
    multi_buffer_snapshot
        // Resolve every token's start and end anchor in one batch (two
        // anchors per token, in order), then re-pair them with `tuples`.
        .text_anchors_to_visible_anchors(
            buffer_tokens
                .iter()
                .flat_map(|token| [token.range.start, token.range.end]),
        )
        .into_iter()
        .tuples::<(_, _)>()
        .zip(buffer_tokens)
        .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
            // `None` on either endpoint means the token isn't visible.
            let range = multi_buffer_start?..multi_buffer_end?;
            let style = convert_token(
                stylizer,
                cx.theme().syntax(),
                token.token_type,
                token.token_modifiers,
            )?;
            // Interning deduplicates identical styles across tokens.
            let style = interner.intern(style);
            Some(SemanticTokenHighlight {
                range,
                style,
                token_type: token.token_type,
                token_modifiers: token.token_modifiers,
                server_id: stylizer.server_id(),
            })
        })
}
360
361fn convert_token(
362 stylizer: &SemanticTokenStylizer,
363 theme: &SyntaxTheme,
364 token_type: TokenType,
365 modifiers: u32,
366) -> Option<HighlightStyle> {
367 let rules = stylizer.rules_for_token(token_type)?;
368 let matching: Vec<_> = rules
369 .iter()
370 .filter(|rule| {
371 rule.token_modifiers
372 .iter()
373 .all(|m| stylizer.has_modifier(modifiers, m))
374 })
375 .collect();
376
377 if let Some(rule) = matching.last() {
378 if rule.no_style_defined() {
379 return None;
380 }
381 }
382
383 let mut highlight = HighlightStyle::default();
384 let mut empty = true;
385
386 for rule in matching {
387 empty = false;
388
389 let style = rule
390 .style
391 .iter()
392 .find_map(|style| theme.style_for_name(style));
393
394 macro_rules! overwrite {
395 (
396 highlight.$highlight_field:ident,
397 SemanticTokenRule::$rule_field:ident,
398 $transform:expr $(,)?
399 ) => {
400 highlight.$highlight_field = rule
401 .$rule_field
402 .map($transform)
403 .or_else(|| style.and_then(|s| s.$highlight_field))
404 .or(highlight.$highlight_field)
405 };
406 }
407
408 overwrite!(
409 highlight.color,
410 SemanticTokenRule::foreground_color,
411 Into::into,
412 );
413
414 overwrite!(
415 highlight.background_color,
416 SemanticTokenRule::background_color,
417 Into::into,
418 );
419
420 overwrite!(
421 highlight.font_weight,
422 SemanticTokenRule::font_weight,
423 |w| match w {
424 SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
425 SemanticTokenFontWeight::Bold => FontWeight::BOLD,
426 },
427 );
428
429 overwrite!(
430 highlight.font_style,
431 SemanticTokenRule::font_style,
432 |s| match s {
433 SemanticTokenFontStyle::Normal => FontStyle::Normal,
434 SemanticTokenFontStyle::Italic => FontStyle::Italic,
435 },
436 );
437
438 overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
439 UnderlineStyle {
440 thickness: 1.0.into(),
441 color: match u {
442 SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
443 SemanticTokenColorOverride::InheritForeground(false) => None,
444 SemanticTokenColorOverride::Replace(c) => Some(c.into()),
445 },
446 ..UnderlineStyle::default()
447 }
448 });
449
450 overwrite!(
451 highlight.strikethrough,
452 SemanticTokenRule::strikethrough,
453 |s| StrikethroughStyle {
454 thickness: 1.0.into(),
455 color: match s {
456 SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
457 SemanticTokenColorOverride::InheritForeground(false) => None,
458 SemanticTokenColorOverride::Replace(c) => Some(c.into()),
459 },
460 },
461 );
462 }
463
464 if empty { None } else { Some(highlight) }
465}
466
467#[cfg(test)]
468mod tests {
469 use std::{
470 ops::Range,
471 sync::atomic::{self, AtomicUsize},
472 };
473
474 use futures::StreamExt as _;
475 use gpui::{
476 AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, UpdateGlobal as _,
477 };
478 use language::{Language, LanguageConfig, LanguageMatcher};
479 use languages::FakeLspAdapter;
480 use multi_buffer::{
481 AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
482 };
483 use project::Project;
484 use rope::Point;
485 use serde_json::json;
486 use settings::{
487 GlobalLspSettingsContent, LanguageSettingsContent, SemanticTokenRule, SemanticTokenRules,
488 SemanticTokens, SettingsStore,
489 };
490 use workspace::{MultiWorkspace, WorkspaceHandle as _};
491
492 use crate::{
493 Capability,
494 editor_tests::{init_test, update_test_language_settings},
495 test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
496 };
497
498 use super::*;
499
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        // A server that only supports full requests (`delta: None`) must
        // receive a full request for every change, never a delta — even if
        // it returns a result ID.
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Counts how many full requests the fake server receives.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token at line 0, col 3, length 4.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // The token covers offsets 3..7 ("main").
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Both edits went through the full request path.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
577
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        // Even when the server advertises delta support, a `None` result ID
        // in its response must keep the client on the full request path.
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // Delta support is advertised here, unlike the
                            // full-only test above.
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Counts full requests received by the fake server.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token at line 0, col 3, length 4.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Await the debounced refresh before editing again.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        // The token covers offsets 3..7 ("main").
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        // Both edits went through the full request path.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
653
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        // When the server supports deltas and returns a result ID, the
        // second fetch must be a delta request referencing that ID.
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Count full and delta requests separately.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token at line 0, col 3, length 4.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    // The delta must reference the result ID from the full
                    // response.
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                // No edits: tokens are unchanged.
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // The token covers offsets 3..7 ("main").
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Exactly one full request followed by one delta request.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
749
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        // Two language servers for the same buffer: both are queried once
        // and their tokens are merged into the editor's highlights.
        init_test(cx, |_| {});

        // Enable full semantic tokens for TOML buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test.
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        // One full-request counter per fake server.
        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight '3' as a literal
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Both fake servers must have started.
        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        // Advance past the 50ms refresh debounce.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // One highlight from each server, merged and sorted by position.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        // Each server received exactly one full request.
        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
969
970 #[gpui::test]
971 async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
972 init_test(cx, |_| {});
973
974 update_test_language_settings(cx, &|language_settings| {
975 language_settings.languages.0.insert(
976 "TOML".into(),
977 LanguageSettingsContent {
978 semantic_tokens: Some(SemanticTokens::Full),
979 ..LanguageSettingsContent::default()
980 },
981 );
982 language_settings.languages.0.insert(
983 "Rust".into(),
984 LanguageSettingsContent {
985 semantic_tokens: Some(SemanticTokens::Full),
986 ..LanguageSettingsContent::default()
987 },
988 );
989 });
990
991 let toml_language = Arc::new(Language::new(
992 LanguageConfig {
993 name: "TOML".into(),
994 matcher: LanguageMatcher {
995 path_suffixes: vec!["toml".into()],
996 ..LanguageMatcher::default()
997 },
998 ..LanguageConfig::default()
999 },
1000 None,
1001 ));
1002 let rust_language = Arc::new(Language::new(
1003 LanguageConfig {
1004 name: "Rust".into(),
1005 matcher: LanguageMatcher {
1006 path_suffixes: vec!["rs".into()],
1007 ..LanguageMatcher::default()
1008 },
1009 ..LanguageConfig::default()
1010 },
1011 None,
1012 ));
1013
1014 let toml_legend = lsp::SemanticTokensLegend {
1015 token_types: vec!["property".into()],
1016 token_modifiers: Vec::new(),
1017 };
1018 let rust_legend = lsp::SemanticTokensLegend {
1019 token_types: vec!["constant".into()],
1020 token_modifiers: Vec::new(),
1021 };
1022
1023 let app_state = cx.update(workspace::AppState::test);
1024
1025 cx.update(|cx| {
1026 assets::Assets.load_test_fonts(cx);
1027 crate::init(cx);
1028 workspace::init(app_state.clone(), cx);
1029 });
1030
1031 let project = Project::test(app_state.fs.clone(), [], cx).await;
1032 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1033 let full_counter_toml = Arc::new(AtomicUsize::new(0));
1034 let full_counter_toml_clone = full_counter_toml.clone();
1035
1036 let mut toml_server = language_registry.register_fake_lsp(
1037 toml_language.name(),
1038 FakeLspAdapter {
1039 name: "toml",
1040 capabilities: lsp::ServerCapabilities {
1041 semantic_tokens_provider: Some(
1042 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1043 lsp::SemanticTokensOptions {
1044 legend: toml_legend,
1045 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1046 ..lsp::SemanticTokensOptions::default()
1047 },
1048 ),
1049 ),
1050 ..lsp::ServerCapabilities::default()
1051 },
1052 initializer: Some(Box::new({
1053 let full_counter_toml_clone = full_counter_toml_clone.clone();
1054 move |fake_server| {
1055 let full_counter = full_counter_toml_clone.clone();
1056 fake_server
1057 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1058 move |_, _| {
1059 full_counter.fetch_add(1, atomic::Ordering::Release);
1060 async move {
1061 Ok(Some(lsp::SemanticTokensResult::Tokens(
1062 lsp::SemanticTokens {
1063 // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
1064 data: vec![
1065 0, // delta_line (line 0)
1066 0, // delta_start
1067 1, // length
1068 0, // token_type
1069 0, // token_modifiers_bitset
1070 1, // delta_line (line 1)
1071 0, // delta_start
1072 1, // length
1073 0, // token_type
1074 0, // token_modifiers_bitset
1075 1, // delta_line (line 2)
1076 0, // delta_start
1077 1, // length
1078 0, // token_type
1079 0, // token_modifiers_bitset
1080 ],
1081 result_id: Some("a".into()),
1082 },
1083 )))
1084 }
1085 },
1086 );
1087 }
1088 })),
1089 ..FakeLspAdapter::default()
1090 },
1091 );
1092 language_registry.add(toml_language.clone());
1093 let mut rust_server = language_registry.register_fake_lsp(
1094 rust_language.name(),
1095 FakeLspAdapter {
1096 name: "rust",
1097 capabilities: lsp::ServerCapabilities {
1098 semantic_tokens_provider: Some(
1099 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1100 lsp::SemanticTokensOptions {
1101 legend: rust_legend,
1102 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1103 ..lsp::SemanticTokensOptions::default()
1104 },
1105 ),
1106 ),
1107 ..lsp::ServerCapabilities::default()
1108 },
1109 ..FakeLspAdapter::default()
1110 },
1111 );
1112 language_registry.add(rust_language.clone());
1113
1114 app_state
1115 .fs
1116 .as_fake()
1117 .insert_tree(
1118 EditorLspTestContext::root_path(),
1119 json!({
1120 ".git": {},
1121 "dir": {
1122 "foo.toml": "a = 1\nb = 2\nc = 3\n",
1123 "bar.rs": "const c: usize = 3;\n",
1124 }
1125 }),
1126 )
1127 .await;
1128
1129 let (multi_workspace, cx) =
1130 cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
1131 let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
1132 project
1133 .update(cx, |project, cx| {
1134 project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
1135 })
1136 .await
1137 .unwrap();
1138 cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
1139 .await;
1140
1141 let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
1142 let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
1143 let (toml_item, rust_item) = workspace.update_in(cx, |workspace, window, cx| {
1144 (
1145 workspace.open_path(toml_file, None, true, window, cx),
1146 workspace.open_path(rust_file, None, true, window, cx),
1147 )
1148 });
1149 let toml_item = toml_item.await.expect("Could not open test file");
1150 let rust_item = rust_item.await.expect("Could not open test file");
1151
1152 let (toml_editor, rust_editor) = cx.update(|_, cx| {
1153 (
1154 toml_item
1155 .act_as::<Editor>(cx)
1156 .expect("Opened test file wasn't an editor"),
1157 rust_item
1158 .act_as::<Editor>(cx)
1159 .expect("Opened test file wasn't an editor"),
1160 )
1161 });
1162 let toml_buffer = cx.read(|cx| {
1163 toml_editor
1164 .read(cx)
1165 .buffer()
1166 .read(cx)
1167 .as_singleton()
1168 .unwrap()
1169 });
1170 let rust_buffer = cx.read(|cx| {
1171 rust_editor
1172 .read(cx)
1173 .buffer()
1174 .read(cx)
1175 .as_singleton()
1176 .unwrap()
1177 });
1178 let multibuffer = cx.new(|cx| {
1179 let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
1180 multibuffer.set_excerpts_for_path(
1181 PathKey::sorted(0),
1182 toml_buffer.clone(),
1183 [Point::new(0, 0)..Point::new(0, 4)],
1184 0,
1185 cx,
1186 );
1187 multibuffer.set_excerpts_for_path(
1188 PathKey::sorted(1),
1189 rust_buffer.clone(),
1190 [Point::new(0, 0)..Point::new(0, 4)],
1191 0,
1192 cx,
1193 );
1194 multibuffer
1195 });
1196
1197 let editor = workspace.update_in(cx, |workspace, window, cx| {
1198 let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
1199 workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
1200 editor
1201 });
1202 editor.update_in(cx, |editor, window, cx| {
1203 let nav_history = workspace
1204 .read(cx)
1205 .active_pane()
1206 .read(cx)
1207 .nav_history_for_item(&cx.entity());
1208 editor.set_nav_history(Some(nav_history));
1209 window.focus(&editor.focus_handle(cx), cx)
1210 });
1211
1212 let _toml_server = toml_server.next().await.unwrap();
1213 let _rust_server = rust_server.next().await.unwrap();
1214
1215 // Initial request.
1216 cx.executor().advance_clock(Duration::from_millis(200));
1217 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1218 cx.run_until_parked();
1219 task.await;
1220 assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
1221 cx.run_until_parked();
1222
1223 // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
1224 // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
1225 assert_eq!(
1226 extract_semantic_highlights(&editor, &cx),
1227 vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
1228 );
1229
1230 // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
1231 let toml_excerpt_id =
1232 editor.read_with(cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]);
1233 editor.update_in(cx, |editor, _, cx| {
1234 editor.buffer().update(cx, |buffer, cx| {
1235 buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx);
1236 });
1237 });
1238
1239 // Wait for semantic tokens to be re-fetched after expansion.
1240 cx.executor().advance_clock(Duration::from_millis(200));
1241 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1242 cx.run_until_parked();
1243 task.await;
1244
1245 // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
1246 // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
1247 // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
1248 assert_eq!(
1249 extract_semantic_highlights(&editor, &cx),
1250 vec![
1251 MultiBufferOffset(0)..MultiBufferOffset(1),
1252 MultiBufferOffset(6)..MultiBufferOffset(7),
1253 MultiBufferOffset(12)..MultiBufferOffset(13),
1254 ]
1255 );
1256 }
1257
1258 fn extract_semantic_highlights(
1259 editor: &Entity<Editor>,
1260 cx: &TestAppContext,
1261 ) -> Vec<Range<MultiBufferOffset>> {
1262 editor.read_with(cx, |editor, cx| {
1263 let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1264 editor
1265 .display_map
1266 .read(cx)
1267 .semantic_token_highlights
1268 .iter()
1269 .flat_map(|(_, (v, _))| v.iter())
1270 .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1271 .collect()
1272 })
1273 }
1274
    // Changing `global_lsp_settings.semantic_token_rules` in the user settings
    // should restyle semantic token highlights: after the next token fetch, the
    // single "function" token picks up the custom foreground color from the rule.
    #[gpui::test]
    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server whose legend maps token type 0 to "function".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Always report a single 4-char "function" token at line 0, column 3
        // (i.e. `main` in "fn main() {}").
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type (function)
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None,
                            },
                        )))
                    }
                },
            );

        // Trigger initial semantic tokens fetch
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Verify initial highlights exist (with no custom color yet)
        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
        assert_eq!(
            initial_ranges,
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Should have initial semantic token highlights"
        );
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Initial color should be None or theme default (not red or blue)
        let initial_color = initial_styles[0].color;

        // Set a custom foreground color for function tokens via settings.json
        let red_color = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        };
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: Vec::from([SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                foreground_color: Some(red_color),
                                ..SemanticTokenRule::default()
                            }]),
                        }),
                        ..GlobalLspSettingsContent::default()
                    });
                });
            });
        });

        // Trigger a refetch by making an edit (which forces semantic tokens update)
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        // Verify the highlights now have the custom red color
        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(
            styles_after_settings_change.len(),
            1,
            "Should still have one highlight"
        );
        assert_eq!(
            styles_after_settings_change[0].color,
            Some(Hsla::from(red_color)),
            "Highlight should have the custom red color from settings.json"
        );
        assert_ne!(
            styles_after_settings_change[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1395
    // Setting `theme.experimental_theme_overrides` reloads the global theme,
    // which must restyle existing semantic token highlights without a new token
    // fetch: override to red, then blue, then clear back to the theme default.
    #[gpui::test]
    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme_settings::{HighlightStyleContent, ThemeStyleContent};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server whose legend maps token type 0 to "function".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Always report a single 4-char "function" token at line 0, column 3.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Fetch the initial tokens and remember the default style for comparison.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Changing experimental_theme_overrides triggers GlobalTheme reload,
        // which fires theme_changed → refresh_semantic_token_highlights.
        let red_color: Hsla = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#ff0000".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        // Advance the clock past any refresh delay before asserting.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(red_color),
            "Highlight should have red color from theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );

        // Changing the override to a different color also restyles.
        let blue_color: Hsla = Rgba {
            r: 0.0,
            g: 0.0,
            b: 1.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#0000ff".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_second_override.len(), 1);
        assert_eq!(
            styles_after_second_override[0].color,
            Some(blue_color),
            "Highlight should have blue color from updated theme override"
        );

        // Removing overrides reverts to the original theme color.
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = None;
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_clear.len(), 1);
        assert_eq!(
            styles_after_clear[0].color, initial_color,
            "Highlight should revert to initial color after clearing overrides"
        );
    }
1560
    // Per-theme overrides (`theme.theme_overrides`, keyed by theme name) take the
    // same reload path as the experimental overrides and must likewise restyle
    // existing semantic token highlights.
    #[gpui::test]
    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme_settings::{HighlightStyleContent, ThemeStyleContent};
        use ui::ActiveTheme as _;

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server whose legend maps token type 0 to "function".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Always report a single 4-char "function" token at line 0, column 3.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Fetch the initial tokens and remember the default style for comparison.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Per-theme overrides (theme_overrides keyed by theme name) also go through
        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
        let green_color: Hsla = Rgba {
            r: 0.0,
            g: 1.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    // Key the override by the currently-active theme's name so it applies.
                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
                        theme_name.clone(),
                        ThemeStyleContent {
                            syntax: IndexMap::from_iter([(
                                "function".to_string(),
                                HighlightStyleContent {
                                    color: Some("#00ff00".to_string()),
                                    background_color: None,
                                    font_style: None,
                                    font_weight: None,
                                },
                            )]),
                            ..ThemeStyleContent::default()
                        },
                    )]);
                });
            });
        });

        // Advance the clock past any refresh delay before asserting.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(green_color),
            "Highlight should have green color from per-theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1673
    // Stopping the language servers for a buffer should remove all semantic
    // token highlights that were produced by them.
    #[gpui::test]
    async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server whose legend maps token type 0 to "function".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Always report a single 4-char token at line 0, column 3 (`main`).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before stopping the server"
        );

        // Stop every language server attached to the editor's buffers.
        cx.update_editor(|editor, _, cx| {
            let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
            editor.project.as_ref().unwrap().update(cx, |project, cx| {
                project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
            })
        });
        // Let the shutdown and any follow-up refresh settle before asserting.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after stopping the server"
        );
    }
1751
    // Switching the per-language `semantic_tokens` setting to `Off` should clear
    // existing highlights without requiring an edit or a server restart.
    #[gpui::test]
    async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server whose legend maps token type 0 to "function".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Always report a single 4-char token at line 0, column 3 (`main`).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before disabling the setting"
        );

        // Turn the feature off for Rust; no edit is made afterwards.
        update_test_language_settings(&mut cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Off),
                    ..LanguageSettingsContent::default()
                },
            );
        });
        // Let the settings change propagate before asserting.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after disabling the setting"
        );
    }
1832
1833 #[gpui::test]
1834 async fn test_semantic_token_disabling_with_empty_rule(cx: &mut TestAppContext) {
1835 init_test(cx, |_| {});
1836 update_test_language_settings(cx, &|s| {
1837 s.languages.0.insert(
1838 "Rust".into(),
1839 LanguageSettingsContent {
1840 semantic_tokens: Some(SemanticTokens::Full),
1841 ..Default::default()
1842 },
1843 );
1844 });
1845
1846 let mut cx = EditorLspTestContext::new_rust(
1847 lsp::ServerCapabilities {
1848 semantic_tokens_provider: Some(
1849 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1850 lsp::SemanticTokensOptions {
1851 legend: lsp::SemanticTokensLegend {
1852 token_types: vec!["function".into()],
1853 token_modifiers: vec![],
1854 },
1855 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1856 ..Default::default()
1857 },
1858 ),
1859 ),
1860 ..Default::default()
1861 },
1862 cx,
1863 )
1864 .await;
1865
1866 let mut full_request = cx
1867 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1868 move |_, _, _| async move {
1869 Ok(Some(lsp::SemanticTokensResult::Tokens(
1870 lsp::SemanticTokens {
1871 data: vec![0, 3, 4, 0, 0],
1872 result_id: None,
1873 },
1874 )))
1875 },
1876 );
1877
1878 // Verify it highlights by default
1879 cx.set_state("ˇfn main() {}");
1880 full_request.next().await;
1881 cx.run_until_parked();
1882 assert_eq!(extract_semantic_highlights(&cx.editor, &cx).len(), 1);
1883
1884 // Apply EMPTY rule to disable it
1885 cx.update(|_, cx| {
1886 SettingsStore::update_global(cx, |store, cx| {
1887 store.update_user_settings(cx, |settings| {
1888 settings.global_lsp_settings = Some(GlobalLspSettingsContent {
1889 semantic_token_rules: Some(SemanticTokenRules {
1890 rules: vec![SemanticTokenRule {
1891 token_type: Some("function".to_string()),
1892 ..Default::default()
1893 }],
1894 }),
1895 ..Default::default()
1896 });
1897 });
1898 });
1899 });
1900
1901 cx.set_state("ˇfn main() { }");
1902 full_request.next().await;
1903 cx.run_until_parked();
1904
1905 assert!(
1906 extract_semantic_highlights(&cx.editor, &cx).is_empty(),
1907 "Highlighting should be disabled by empty style setting"
1908 );
1909 }
1910
1911 #[gpui::test]
1912 async fn test_semantic_token_broad_rule_disables_specific_token(cx: &mut TestAppContext) {
1913 init_test(cx, |_| {});
1914 update_test_language_settings(cx, &|s| {
1915 s.languages.0.insert(
1916 "Rust".into(),
1917 LanguageSettingsContent {
1918 semantic_tokens: Some(SemanticTokens::Full),
1919 ..Default::default()
1920 },
1921 );
1922 });
1923
1924 let mut cx = EditorLspTestContext::new_rust(
1925 lsp::ServerCapabilities {
1926 semantic_tokens_provider: Some(
1927 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1928 lsp::SemanticTokensOptions {
1929 legend: lsp::SemanticTokensLegend {
1930 token_types: vec!["comment".into()],
1931 token_modifiers: vec!["documentation".into()],
1932 },
1933 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1934 ..Default::default()
1935 },
1936 ),
1937 ),
1938 ..Default::default()
1939 },
1940 cx,
1941 )
1942 .await;
1943
1944 let mut full_request = cx
1945 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1946 move |_, _, _| async move {
1947 Ok(Some(lsp::SemanticTokensResult::Tokens(
1948 lsp::SemanticTokens {
1949 data: vec![0, 0, 5, 0, 1], // comment [documentation]
1950 result_id: None,
1951 },
1952 )))
1953 },
1954 );
1955
1956 cx.set_state("ˇ/// d\n");
1957 full_request.next().await;
1958 cx.run_until_parked();
1959 assert_eq!(
1960 extract_semantic_highlights(&cx.editor, &cx).len(),
1961 1,
1962 "Documentation comment should be highlighted"
1963 );
1964
1965 // Apply a BROAD empty rule for "comment" (no modifiers)
1966 cx.update(|_, cx| {
1967 SettingsStore::update_global(cx, |store, cx| {
1968 store.update_user_settings(cx, |settings| {
1969 settings.global_lsp_settings = Some(GlobalLspSettingsContent {
1970 semantic_token_rules: Some(SemanticTokenRules {
1971 rules: vec![SemanticTokenRule {
1972 token_type: Some("comment".to_string()),
1973 ..Default::default()
1974 }],
1975 }),
1976 ..Default::default()
1977 });
1978 });
1979 });
1980 });
1981
1982 cx.set_state("ˇ/// d\n");
1983 full_request.next().await;
1984 cx.run_until_parked();
1985
1986 assert!(
1987 extract_semantic_highlights(&cx.editor, &cx).is_empty(),
1988 "Broad empty rule should disable specific documentation comment"
1989 );
1990 }
1991
    // A rule that names both a token type and a modifier ("comment" +
    // "documentation") matches only tokens carrying that modifier; plain
    // "comment" tokens keep their default styling.
    #[gpui::test]
    async fn test_semantic_token_specific_rule_does_not_disable_broad_token(
        cx: &mut TestAppContext,
    ) {
        use gpui::UpdateGlobal as _;
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});
        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|s| {
            s.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..Default::default()
                },
            );
        });

        // Server legend: token type 0 == "comment", modifier bit 0 == "documentation".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["comment".into()],
                                token_modifiers: vec!["documentation".into()],
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..Default::default()
                        },
                    ),
                ),
                ..Default::default()
            },
            cx,
        )
        .await;

        // Two tokens: a documentation comment on line 0 and a plain comment on line 1.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, 0, 5, 0, 1, // comment [documentation]
                                1, 0, 5, 0, 0, // normal comment
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇ/// d\n// n\n");
        full_request.next().await;
        cx.run_until_parked();
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx).len(),
            2,
            "Both documentation and normal comments should be highlighted initially"
        );

        // Apply a SPECIFIC empty rule for documentation only
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: vec![SemanticTokenRule {
                                token_type: Some("comment".to_string()),
                                token_modifiers: vec!["documentation".to_string()],
                                ..Default::default()
                            }],
                        }),
                        ..Default::default()
                    });
                });
            });
        });

        // Reset the buffer to force a token re-fetch under the new rules.
        cx.set_state("ˇ/// d\n// n\n");
        full_request.next().await;
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx).len(),
            1,
            "Normal comment should still be highlighted (matched by default rule)"
        );
    }
2082
2083 fn extract_semantic_highlight_styles(
2084 editor: &Entity<Editor>,
2085 cx: &TestAppContext,
2086 ) -> Vec<HighlightStyle> {
2087 editor.read_with(cx, |editor, cx| {
2088 editor
2089 .display_map
2090 .read(cx)
2091 .semantic_token_highlights
2092 .iter()
2093 .flat_map(|(_, (v, interner))| {
2094 v.iter().map(|highlights| interner[highlights.style])
2095 })
2096 .collect()
2097 })
2098 }
2099}