1use std::{collections::hash_map, sync::Arc, time::Duration};
2
3use collections::{HashMap, HashSet};
4use futures::future::join_all;
5use gpui::{
6 App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
7};
8use itertools::Itertools;
9use language::language_settings::LanguageSettings;
10use project::{
11 lsp_store::{
12 BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
13 TokenType,
14 },
15 project_settings::ProjectSettings,
16};
17use settings::{
18 SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
19 SemanticTokenRules, Settings as _,
20};
21use text::BufferId;
22use theme::SyntaxTheme;
23use ui::ActiveTheme as _;
24
25use crate::{
26 Editor,
27 actions::ToggleSemanticHighlights,
28 display_map::{HighlightStyleInterner, SemanticTokenHighlight},
29};
30
/// Per-editor state for LSP semantic token highlighting.
pub(super) struct SemanticTokenState {
    /// Snapshot of the semantic-token rules from the global LSP settings;
    /// compared against incoming settings to detect rule changes.
    rules: SemanticTokenRules,
    /// Whether semantic highlighting is currently enabled for this editor.
    enabled: bool,
    /// The in-flight (debounced) task that fetches tokens and applies them as
    /// highlights; replaced wholesale to cancel a pending refresh.
    update_task: Task<()>,
    /// Buffer version for which tokens were last fetched, per buffer; used to
    /// skip refetching when a buffer hasn't changed since.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
37
38impl SemanticTokenState {
39 pub(super) fn new(cx: &App, enabled: bool) -> Self {
40 Self {
41 rules: ProjectSettings::get_global(cx)
42 .global_lsp_settings
43 .semantic_token_rules
44 .clone(),
45 enabled,
46 update_task: Task::ready(()),
47 fetched_for_buffers: HashMap::default(),
48 }
49 }
50
51 pub(super) fn enabled(&self) -> bool {
52 self.enabled
53 }
54
55 pub(super) fn toggle_enabled(&mut self) {
56 self.enabled = !self.enabled;
57 }
58
59 #[cfg(test)]
60 pub(super) fn take_update_task(&mut self) -> Task<()> {
61 std::mem::replace(&mut self.update_task, Task::ready(()))
62 }
63
64 pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
65 self.fetched_for_buffers.remove(buffer_id);
66 }
67
68 pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
69 if new_rules != self.rules {
70 self.rules = new_rules;
71 true
72 } else {
73 false
74 }
75 }
76}
77
impl Editor {
    /// Returns `true` if any buffer in this editor's multi-buffer has a
    /// semantics provider that supports semantic tokens.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(&mut |buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlighting is currently enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: flips semantic highlighting, drops all cached token
    /// state, and refreshes (which clears highlights when now disabled).
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.invalidate_semantic_tokens(None);
        self.refresh_semantic_tokens(None, None, cx);
    }

    /// Forgets the fetched-version bookkeeping for one buffer (or all buffers
    /// when `for_buffer` is `None`) so the next refresh refetches tokens.
    pub(super) fn invalidate_semantic_tokens(&mut self, for_buffer: Option<BufferId>) {
        match for_buffer {
            Some(for_buffer) => self.semantic_token_state.invalidate_buffer(&for_buffer),
            None => self.semantic_token_state.fetched_for_buffers.clear(),
        }
    }

    /// Debounced fetch of semantic tokens for the visible buffers (plus
    /// `buffer_id`, if given), applying the results as display-map highlights.
    ///
    /// When `for_server` is set (a server-initiated refresh), the cached
    /// per-buffer versions are drained and existing highlights for those
    /// buffers are invalidated before the new results are applied.
    pub(super) fn refresh_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        // Disabled: clear all cached state and highlights instead of fetching.
        if !self.lsp_data_enabled() || !self.semantic_token_state.enabled() {
            self.invalidate_semantic_tokens(None);
            self.display_map.update(cx, |display_map, _| {
                // Clear in place when we hold the only Arc reference;
                // otherwise swap in a fresh empty map.
                match Arc::get_mut(&mut display_map.semantic_token_highlights) {
                    Some(highlights) => highlights.clear(),
                    None => display_map.semantic_token_highlights = Arc::new(Default::default()),
                };
            });
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-initiated refresh invalidates everything fetched so far.
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Candidate buffers: visible, LSP-relevant ones plus the explicitly
        // requested buffer — kept only when registered and with semantic
        // tokens enabled in their language settings.
        let buffers_to_query = self
            .visible_buffers(cx)
            .into_iter()
            .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && LanguageSettings::for_buffer(editor_buffer.read(cx), cx)
                        .semantic_tokens
                        .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Drop existing highlights for buffers that won't be queried again
        // and whose settings no longer enable semantic tokens.
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .keys()
            .copied()
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        LanguageSettings::for_buffer(&buffer, cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        // Replacing `update_task` drops (cancels) any previously scheduled
        // refresh, debouncing rapid successive calls via the 50ms timer.
        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            // Kick off one fetch per buffer whose version advanced since the
            // last successful fetch.
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                sema.semantic_tokens(buffer, for_server, cx).map(
                                    |task| async move { (buffer_id, query_version, task.await) },
                                )
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // Apply the pending server-refresh invalidations first.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                            editor.semantic_token_state.invalidate_buffer(&buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // Server reported no tokens: drop any stale
                            // highlights for this buffer.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                    {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Only apply this result if it is for a newer buffer
                        // version than what we've already recorded.
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        // Convert every server's tokens into highlight
                        // entries, intern the styles, and store them sorted by
                        // range start under this buffer's id.
                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        cx,
                                    ));
                                }

                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                Arc::make_mut(&mut display_map.semantic_token_highlights).insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }
}
325
326fn buffer_into_editor_highlights<'a, 'b>(
327 buffer_tokens: &'a [BufferSemanticToken],
328 stylizer: &'a SemanticTokenStylizer,
329 multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
330 interner: &'b mut HighlightStyleInterner,
331 cx: &'a App,
332) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
333 multi_buffer_snapshot
334 .text_anchors_to_visible_anchors(
335 buffer_tokens
336 .iter()
337 .flat_map(|token| [token.range.start, token.range.end]),
338 )
339 .into_iter()
340 .tuples::<(_, _)>()
341 .zip(buffer_tokens)
342 .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
343 let range = multi_buffer_start?..multi_buffer_end?;
344 let style = convert_token(
345 stylizer,
346 cx.theme().syntax(),
347 token.token_type,
348 token.token_modifiers,
349 )?;
350 let style = interner.intern(style);
351 Some(SemanticTokenHighlight {
352 range,
353 style,
354 token_type: token.token_type,
355 token_modifiers: token.token_modifiers,
356 server_id: stylizer.server_id(),
357 })
358 })
359}
360
/// Resolves the highlight style for a single semantic token.
///
/// Rules for the token type are applied in declaration order, so later rules
/// override earlier ones field by field. For each styled field the precedence
/// is: the rule's own explicit value, then the theme style named by the rule,
/// then whatever an earlier rule already accumulated.
///
/// Returns `None` when no rule matches, or when the last matching rule
/// explicitly defines no style (an opt-out of highlighting for this token).
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    let rules = stylizer.rules_for_token(token_type)?;
    // Keep only rules whose required modifiers are all set on the token.
    let matching: Vec<_> = rules
        .iter()
        .filter(|rule| {
            rule.token_modifiers
                .iter()
                .all(|m| stylizer.has_modifier(modifiers, m))
        })
        .collect();

    // The last matching rule has the final say; if it defines no style at
    // all, the token is deliberately left unstyled.
    if let Some(rule) = matching.last() {
        if rule.no_style_defined() {
            return None;
        }
    }

    let mut highlight = HighlightStyle::default();
    let mut empty = true;

    for rule in matching {
        empty = false;

        // First theme style whose name appears in this rule's `style` list.
        let style = rule
            .style
            .iter()
            .find_map(|style| theme.style_for_name(style));

        // Sets `highlight.$highlight_field` from, in precedence order: the
        // rule's explicit value, the named theme style, or the value already
        // accumulated from earlier rules.
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        // NOTE: `InheritForeground` reads the foreground accumulated so far,
        // which includes this rule's own `foreground_color` applied above.
        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }

    // `empty` is only still true when no rule matched at all.
    if empty { None } else { Some(highlight) }
}
466
467#[cfg(test)]
468mod tests {
469 use std::{
470 ops::Range,
471 sync::atomic::{self, AtomicUsize},
472 };
473
474 use futures::StreamExt as _;
475 use gpui::{
476 AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, UpdateGlobal as _,
477 };
478 use language::{Language, LanguageConfig, LanguageMatcher};
479 use languages::FakeLspAdapter;
480 use multi_buffer::{
481 AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
482 };
483 use project::Project;
484 use rope::Point;
485 use serde_json::json;
486 use settings::{
487 GlobalLspSettingsContent, LanguageSettingsContent, SemanticTokenRule, SemanticTokenRules,
488 SemanticTokens, SettingsStore,
489 };
490 use workspace::{MultiWorkspace, WorkspaceHandle as _};
491
492 use crate::{
493 Capability,
494 editor_tests::{init_test, update_test_language_settings},
495 test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
496 };
497
498 use super::*;
499
    // A server that advertises full (but not delta) semantic tokens must get a
    // fresh `textDocument/semanticTokens/full` request after every edit, even
    // when it returns a `result_id`.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token on line 0, cols 3..7 ("main").
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // The token from the latest response is highlighted.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Both edits produced full requests; no delta requests were made.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
577
    // Even when the server advertises delta support, a response with
    // `result_id: None` gives the client nothing to diff against, so it must
    // keep issuing full requests.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token on line 0, cols 3..7 ("main").
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Await the editor's debounced update so highlights are applied.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        // Two full requests: no delta was possible without a result ID.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
653
    // With delta support advertised and a `result_id` returned, the second
    // fetch goes through `semanticTokens/full/delta`, carrying the previous
    // result ID.
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token on line 0, cols 3..7 ("main").
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        // Delta handler: verifies the client echoes the previous result ID
        // and responds with an empty edit list (tokens unchanged).
        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial buffer contents trigger the first (full) request.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Exactly one full request followed by exactly one delta request.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
749
    // Two language servers attached to the same TOML buffer each contribute
    // their own tokens; the editor applies highlights from both and queries
    // each server exactly once.
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for TOML buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test, each with its own
        // single-entry legend.
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight line 0, cols 0..1 (the key) as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight line 0, cols 4..5 (the value) as a number
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // One highlight from each server: cols 0..1 and cols 4..5 on line 0.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        // Each server saw exactly one full request.
        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
969
970 #[gpui::test]
971 async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
972 init_test(cx, |_| {});
973
974 update_test_language_settings(cx, &|language_settings| {
975 language_settings.languages.0.insert(
976 "TOML".into(),
977 LanguageSettingsContent {
978 semantic_tokens: Some(SemanticTokens::Full),
979 ..LanguageSettingsContent::default()
980 },
981 );
982 language_settings.languages.0.insert(
983 "Rust".into(),
984 LanguageSettingsContent {
985 semantic_tokens: Some(SemanticTokens::Full),
986 ..LanguageSettingsContent::default()
987 },
988 );
989 });
990
991 let toml_language = Arc::new(Language::new(
992 LanguageConfig {
993 name: "TOML".into(),
994 matcher: LanguageMatcher {
995 path_suffixes: vec!["toml".into()],
996 ..LanguageMatcher::default()
997 },
998 ..LanguageConfig::default()
999 },
1000 None,
1001 ));
1002 let rust_language = Arc::new(Language::new(
1003 LanguageConfig {
1004 name: "Rust".into(),
1005 matcher: LanguageMatcher {
1006 path_suffixes: vec!["rs".into()],
1007 ..LanguageMatcher::default()
1008 },
1009 ..LanguageConfig::default()
1010 },
1011 None,
1012 ));
1013
1014 let toml_legend = lsp::SemanticTokensLegend {
1015 token_types: vec!["property".into()],
1016 token_modifiers: Vec::new(),
1017 };
1018 let rust_legend = lsp::SemanticTokensLegend {
1019 token_types: vec!["constant".into()],
1020 token_modifiers: Vec::new(),
1021 };
1022
1023 let app_state = cx.update(workspace::AppState::test);
1024
1025 cx.update(|cx| {
1026 assets::Assets.load_test_fonts(cx);
1027 crate::init(cx);
1028 workspace::init(app_state.clone(), cx);
1029 });
1030
1031 let project = Project::test(app_state.fs.clone(), [], cx).await;
1032 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1033 let full_counter_toml = Arc::new(AtomicUsize::new(0));
1034 let full_counter_toml_clone = full_counter_toml.clone();
1035
1036 let mut toml_server = language_registry.register_fake_lsp(
1037 toml_language.name(),
1038 FakeLspAdapter {
1039 name: "toml",
1040 capabilities: lsp::ServerCapabilities {
1041 semantic_tokens_provider: Some(
1042 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1043 lsp::SemanticTokensOptions {
1044 legend: toml_legend,
1045 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1046 ..lsp::SemanticTokensOptions::default()
1047 },
1048 ),
1049 ),
1050 ..lsp::ServerCapabilities::default()
1051 },
1052 initializer: Some(Box::new({
1053 let full_counter_toml_clone = full_counter_toml_clone.clone();
1054 move |fake_server| {
1055 let full_counter = full_counter_toml_clone.clone();
1056 fake_server
1057 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1058 move |_, _| {
1059 full_counter.fetch_add(1, atomic::Ordering::Release);
1060 async move {
1061 Ok(Some(lsp::SemanticTokensResult::Tokens(
1062 lsp::SemanticTokens {
1063 // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
1064 data: vec![
1065 0, // delta_line (line 0)
1066 0, // delta_start
1067 1, // length
1068 0, // token_type
1069 0, // token_modifiers_bitset
1070 1, // delta_line (line 1)
1071 0, // delta_start
1072 1, // length
1073 0, // token_type
1074 0, // token_modifiers_bitset
1075 1, // delta_line (line 2)
1076 0, // delta_start
1077 1, // length
1078 0, // token_type
1079 0, // token_modifiers_bitset
1080 ],
1081 result_id: Some("a".into()),
1082 },
1083 )))
1084 }
1085 },
1086 );
1087 }
1088 })),
1089 ..FakeLspAdapter::default()
1090 },
1091 );
1092 language_registry.add(toml_language.clone());
1093 let mut rust_server = language_registry.register_fake_lsp(
1094 rust_language.name(),
1095 FakeLspAdapter {
1096 name: "rust",
1097 capabilities: lsp::ServerCapabilities {
1098 semantic_tokens_provider: Some(
1099 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1100 lsp::SemanticTokensOptions {
1101 legend: rust_legend,
1102 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1103 ..lsp::SemanticTokensOptions::default()
1104 },
1105 ),
1106 ),
1107 ..lsp::ServerCapabilities::default()
1108 },
1109 ..FakeLspAdapter::default()
1110 },
1111 );
1112 language_registry.add(rust_language.clone());
1113
1114 app_state
1115 .fs
1116 .as_fake()
1117 .insert_tree(
1118 EditorLspTestContext::root_path(),
1119 json!({
1120 ".git": {},
1121 "dir": {
1122 "foo.toml": "a = 1\nb = 2\nc = 3\n",
1123 "bar.rs": "const c: usize = 3;\n",
1124 }
1125 }),
1126 )
1127 .await;
1128
1129 let (multi_workspace, cx) =
1130 cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
1131 let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
1132 project
1133 .update(cx, |project, cx| {
1134 project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
1135 })
1136 .await
1137 .unwrap();
1138 cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
1139 .await;
1140
1141 let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
1142 let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
1143 let (toml_item, rust_item) = workspace.update_in(cx, |workspace, window, cx| {
1144 (
1145 workspace.open_path(toml_file, None, true, window, cx),
1146 workspace.open_path(rust_file, None, true, window, cx),
1147 )
1148 });
1149 let toml_item = toml_item.await.expect("Could not open test file");
1150 let rust_item = rust_item.await.expect("Could not open test file");
1151
1152 let (toml_editor, rust_editor) = cx.update(|_, cx| {
1153 (
1154 toml_item
1155 .act_as::<Editor>(cx)
1156 .expect("Opened test file wasn't an editor"),
1157 rust_item
1158 .act_as::<Editor>(cx)
1159 .expect("Opened test file wasn't an editor"),
1160 )
1161 });
1162 let toml_buffer = cx.read(|cx| {
1163 toml_editor
1164 .read(cx)
1165 .buffer()
1166 .read(cx)
1167 .as_singleton()
1168 .unwrap()
1169 });
1170 let rust_buffer = cx.read(|cx| {
1171 rust_editor
1172 .read(cx)
1173 .buffer()
1174 .read(cx)
1175 .as_singleton()
1176 .unwrap()
1177 });
1178 let multibuffer = cx.new(|cx| {
1179 let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
1180 multibuffer.set_excerpts_for_path(
1181 PathKey::sorted(0),
1182 toml_buffer.clone(),
1183 [Point::new(0, 0)..Point::new(0, 4)],
1184 0,
1185 cx,
1186 );
1187 multibuffer.set_excerpts_for_path(
1188 PathKey::sorted(1),
1189 rust_buffer.clone(),
1190 [Point::new(0, 0)..Point::new(0, 4)],
1191 0,
1192 cx,
1193 );
1194 multibuffer
1195 });
1196
1197 let editor = workspace.update_in(cx, |workspace, window, cx| {
1198 let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
1199 workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
1200 editor
1201 });
1202 editor.update_in(cx, |editor, window, cx| {
1203 let nav_history = workspace
1204 .read(cx)
1205 .active_pane()
1206 .read(cx)
1207 .nav_history_for_item(&cx.entity());
1208 editor.set_nav_history(Some(nav_history));
1209 window.focus(&editor.focus_handle(cx), cx)
1210 });
1211
1212 let _toml_server = toml_server.next().await.unwrap();
1213 let _rust_server = rust_server.next().await.unwrap();
1214
1215 // Initial request.
1216 cx.executor().advance_clock(Duration::from_millis(200));
1217 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1218 cx.run_until_parked();
1219 task.await;
1220 assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
1221 cx.run_until_parked();
1222
1223 // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
1224 // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
1225 assert_eq!(
1226 extract_semantic_highlights(&editor, &cx),
1227 vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
1228 );
1229
1230 // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
1231 let toml_anchor = editor.read_with(cx, |editor, cx| {
1232 editor
1233 .buffer()
1234 .read(cx)
1235 .snapshot(cx)
1236 .anchor_in_excerpt(text::Anchor::min_for_buffer(
1237 toml_buffer.read(cx).remote_id(),
1238 ))
1239 .unwrap()
1240 });
1241 editor.update_in(cx, |editor, _, cx| {
1242 editor.buffer().update(cx, |buffer, cx| {
1243 buffer.expand_excerpts([toml_anchor], 2, ExpandExcerptDirection::Down, cx);
1244 });
1245 });
1246
1247 // Wait for semantic tokens to be re-fetched after expansion.
1248 cx.executor().advance_clock(Duration::from_millis(200));
1249 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1250 cx.run_until_parked();
1251 task.await;
1252
1253 // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
1254 // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
1255 // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
1256 assert_eq!(
1257 extract_semantic_highlights(&editor, &cx),
1258 vec![
1259 MultiBufferOffset(0)..MultiBufferOffset(1),
1260 MultiBufferOffset(6)..MultiBufferOffset(7),
1261 MultiBufferOffset(12)..MultiBufferOffset(13),
1262 ]
1263 );
1264 }
1265
1266 fn extract_semantic_highlights(
1267 editor: &Entity<Editor>,
1268 cx: &TestAppContext,
1269 ) -> Vec<Range<MultiBufferOffset>> {
1270 editor.read_with(cx, |editor, cx| {
1271 let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1272 editor
1273 .display_map
1274 .read(cx)
1275 .semantic_token_highlights
1276 .iter()
1277 .flat_map(|(_, (v, _))| v.iter())
1278 .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1279 .collect()
1280 })
1281 }
1282
    /// Verifies that editing `global_lsp_settings.semantic_token_rules` in user
    /// settings restyles semantic tokens: after the rules change and the next
    /// token refresh, a `function` token must carry the custom foreground color.
    #[gpui::test]
    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});

        // Enable full semantic token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Single-entry legend: token_type index 0 == "function".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Every full-token request answers with one 4-char "function" token at
        // line 0, column 3 (i.e. "main" in `fn main`).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type (function)
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None,
                            },
                        )))
                    }
                },
            );

        // Trigger initial semantic tokens fetch
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Verify initial highlights exist (with no custom color yet)
        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
        assert_eq!(
            initial_ranges,
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Should have initial semantic token highlights"
        );
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Initial color should be None or theme default (not red or blue);
        // remember it so we can assert the rules change actually altered it.
        let initial_color = initial_styles[0].color;

        // Set a custom foreground color for function tokens via settings.json
        let red_color = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        };
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: Vec::from([SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                foreground_color: Some(red_color),
                                ..SemanticTokenRule::default()
                            }]),
                        }),
                        ..GlobalLspSettingsContent::default()
                    });
                });
            });
        });

        // Trigger a refetch by making an edit (which forces semantic tokens update)
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        // Verify the highlights now have the custom red color
        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(
            styles_after_settings_change.len(),
            1,
            "Should still have one highlight"
        );
        assert_eq!(
            styles_after_settings_change[0].color,
            Some(Hsla::from(red_color)),
            "Highlight should have the custom red color from settings.json"
        );
        assert_ne!(
            styles_after_settings_change[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1403
    /// Verifies that changing `theme.experimental_theme_overrides` restyles
    /// already-fetched semantic tokens: setting an override for the `function`
    /// syntax style, updating it to another color, and finally clearing it must
    /// each be reflected in the highlight colors.
    #[gpui::test]
    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme_settings::{HighlightStyleContent, ThemeStyleContent};

        init_test(cx, |_| {});

        // Enable full semantic token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Single-entry legend: token_type index 0 == "function".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One 4-char "function" token at line 0, column 3 ("main").
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Remember the theme-default color so later assertions can show change.
        let initial_color = initial_styles[0].color;

        // Changing experimental_theme_overrides triggers GlobalTheme reload,
        // which fires theme_changed → refresh_semantic_token_highlights.
        let red_color: Hsla = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#ff0000".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        // Advance past the refresh delay before checking the restyled highlights.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(red_color),
            "Highlight should have red color from theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );

        // Changing the override to a different color also restyles.
        let blue_color: Hsla = Rgba {
            r: 0.0,
            g: 0.0,
            b: 1.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#0000ff".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_second_override.len(), 1);
        assert_eq!(
            styles_after_second_override[0].color,
            Some(blue_color),
            "Highlight should have blue color from updated theme override"
        );

        // Removing overrides reverts to the original theme color.
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = None;
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_clear.len(), 1);
        assert_eq!(
            styles_after_clear[0].color, initial_color,
            "Highlight should revert to initial color after clearing overrides"
        );
    }
1568
    /// Verifies that per-theme overrides (`theme.theme_overrides` keyed by the
    /// active theme's name) restyle semantic tokens, just like the global
    /// experimental overrides exercised in the previous test.
    #[gpui::test]
    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme_settings::{HighlightStyleContent, ThemeStyleContent};
        use ui::ActiveTheme as _;

        init_test(cx, |_| {});

        // Enable full semantic token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Single-entry legend: token_type index 0 == "function".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One 4-char "function" token at line 0, column 3 ("main").
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Remember the theme-default color so later assertions can show change.
        let initial_color = initial_styles[0].color;

        // Per-theme overrides (theme_overrides keyed by theme name) also go through
        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
        let green_color: Hsla = Rgba {
            r: 0.0,
            g: 1.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
                        theme_name.clone(),
                        ThemeStyleContent {
                            syntax: IndexMap::from_iter([(
                                "function".to_string(),
                                HighlightStyleContent {
                                    color: Some("#00ff00".to_string()),
                                    background_color: None,
                                    font_style: None,
                                    font_weight: None,
                                },
                            )]),
                            ..ThemeStyleContent::default()
                        },
                    )]);
                });
            });
        });

        // Advance past the refresh delay before checking the restyled highlights.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(green_color),
            "Highlight should have green color from per-theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1681
    /// Verifies that stopping the language server for a buffer clears its
    /// semantic token highlights instead of leaving stale ones behind.
    #[gpui::test]
    async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Single-entry legend: token_type index 0 == "function".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One 4-char "function" token at line 0, column 3 ("main").
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before stopping the server"
        );

        // Stop every language server attached to the editor's buffers.
        cx.update_editor(|editor, _, cx| {
            let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
            editor.project.as_ref().unwrap().update(cx, |project, cx| {
                project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
            })
        });
        // Give the editor time to react to the server shutdown.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after stopping the server"
        );
    }
1759
    /// Verifies that switching the per-language `semantic_tokens` setting from
    /// `Full` to `Off` clears the existing semantic token highlights.
    #[gpui::test]
    async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Single-entry legend: token_type index 0 == "function".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One 4-char "function" token at line 0, column 3 ("main").
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before disabling the setting"
        );

        // Flip the setting to Off; the editor should react without any edit.
        update_test_language_settings(&mut cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Off),
                    ..LanguageSettingsContent::default()
                },
            );
        });
        // Give the editor time to react to the settings change.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after disabling the setting"
        );
    }
1840
1841 #[gpui::test]
1842 async fn test_semantic_token_disabling_with_empty_rule(cx: &mut TestAppContext) {
1843 init_test(cx, |_| {});
1844 update_test_language_settings(cx, &|s| {
1845 s.languages.0.insert(
1846 "Rust".into(),
1847 LanguageSettingsContent {
1848 semantic_tokens: Some(SemanticTokens::Full),
1849 ..Default::default()
1850 },
1851 );
1852 });
1853
1854 let mut cx = EditorLspTestContext::new_rust(
1855 lsp::ServerCapabilities {
1856 semantic_tokens_provider: Some(
1857 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1858 lsp::SemanticTokensOptions {
1859 legend: lsp::SemanticTokensLegend {
1860 token_types: vec!["function".into()],
1861 token_modifiers: vec![],
1862 },
1863 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1864 ..Default::default()
1865 },
1866 ),
1867 ),
1868 ..Default::default()
1869 },
1870 cx,
1871 )
1872 .await;
1873
1874 let mut full_request = cx
1875 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1876 move |_, _, _| async move {
1877 Ok(Some(lsp::SemanticTokensResult::Tokens(
1878 lsp::SemanticTokens {
1879 data: vec![0, 3, 4, 0, 0],
1880 result_id: None,
1881 },
1882 )))
1883 },
1884 );
1885
1886 // Verify it highlights by default
1887 cx.set_state("ˇfn main() {}");
1888 full_request.next().await;
1889 cx.run_until_parked();
1890 assert_eq!(extract_semantic_highlights(&cx.editor, &cx).len(), 1);
1891
1892 // Apply EMPTY rule to disable it
1893 cx.update(|_, cx| {
1894 SettingsStore::update_global(cx, |store, cx| {
1895 store.update_user_settings(cx, |settings| {
1896 settings.global_lsp_settings = Some(GlobalLspSettingsContent {
1897 semantic_token_rules: Some(SemanticTokenRules {
1898 rules: vec![SemanticTokenRule {
1899 token_type: Some("function".to_string()),
1900 ..Default::default()
1901 }],
1902 }),
1903 ..Default::default()
1904 });
1905 });
1906 });
1907 });
1908
1909 cx.set_state("ˇfn main() { }");
1910 full_request.next().await;
1911 cx.run_until_parked();
1912
1913 assert!(
1914 extract_semantic_highlights(&cx.editor, &cx).is_empty(),
1915 "Highlighting should be disabled by empty style setting"
1916 );
1917 }
1918
1919 #[gpui::test]
1920 async fn test_semantic_token_broad_rule_disables_specific_token(cx: &mut TestAppContext) {
1921 init_test(cx, |_| {});
1922 update_test_language_settings(cx, &|s| {
1923 s.languages.0.insert(
1924 "Rust".into(),
1925 LanguageSettingsContent {
1926 semantic_tokens: Some(SemanticTokens::Full),
1927 ..Default::default()
1928 },
1929 );
1930 });
1931
1932 let mut cx = EditorLspTestContext::new_rust(
1933 lsp::ServerCapabilities {
1934 semantic_tokens_provider: Some(
1935 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1936 lsp::SemanticTokensOptions {
1937 legend: lsp::SemanticTokensLegend {
1938 token_types: vec!["comment".into()],
1939 token_modifiers: vec!["documentation".into()],
1940 },
1941 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1942 ..Default::default()
1943 },
1944 ),
1945 ),
1946 ..Default::default()
1947 },
1948 cx,
1949 )
1950 .await;
1951
1952 let mut full_request = cx
1953 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1954 move |_, _, _| async move {
1955 Ok(Some(lsp::SemanticTokensResult::Tokens(
1956 lsp::SemanticTokens {
1957 data: vec![0, 0, 5, 0, 1], // comment [documentation]
1958 result_id: None,
1959 },
1960 )))
1961 },
1962 );
1963
1964 cx.set_state("ˇ/// d\n");
1965 full_request.next().await;
1966 cx.run_until_parked();
1967 assert_eq!(
1968 extract_semantic_highlights(&cx.editor, &cx).len(),
1969 1,
1970 "Documentation comment should be highlighted"
1971 );
1972
1973 // Apply a BROAD empty rule for "comment" (no modifiers)
1974 cx.update(|_, cx| {
1975 SettingsStore::update_global(cx, |store, cx| {
1976 store.update_user_settings(cx, |settings| {
1977 settings.global_lsp_settings = Some(GlobalLspSettingsContent {
1978 semantic_token_rules: Some(SemanticTokenRules {
1979 rules: vec![SemanticTokenRule {
1980 token_type: Some("comment".to_string()),
1981 ..Default::default()
1982 }],
1983 }),
1984 ..Default::default()
1985 });
1986 });
1987 });
1988 });
1989
1990 cx.set_state("ˇ/// d\n");
1991 full_request.next().await;
1992 cx.run_until_parked();
1993
1994 assert!(
1995 extract_semantic_highlights(&cx.editor, &cx).is_empty(),
1996 "Broad empty rule should disable specific documentation comment"
1997 );
1998 }
1999
    /// The inverse of the broad-rule test: an empty rule scoped to
    /// `comment` + `documentation` suppresses only the documentation comment;
    /// a plain comment token without that modifier keeps its highlight.
    #[gpui::test]
    async fn test_semantic_token_specific_rule_does_not_disable_broad_token(
        cx: &mut TestAppContext,
    ) {
        use gpui::UpdateGlobal as _;
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});
        // Enable full semantic token support for Rust buffers.
        update_test_language_settings(cx, &|s| {
            s.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..Default::default()
                },
            );
        });

        // Legend: token_type 0 == "comment", modifier bit 0 == "documentation".
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["comment".into()],
                                token_modifiers: vec!["documentation".into()],
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..Default::default()
                        },
                    ),
                ),
                ..Default::default()
            },
            cx,
        )
        .await;

        // Two tokens: a documentation comment on line 0, a plain comment on line 1.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, 0, 5, 0, 1, // comment [documentation]
                                1, 0, 5, 0, 0, // normal comment
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇ/// d\n// n\n");
        full_request.next().await;
        cx.run_until_parked();
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx).len(),
            2,
            "Both documentation and normal comments should be highlighted initially"
        );

        // Apply a SPECIFIC empty rule for documentation only
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: vec![SemanticTokenRule {
                                token_type: Some("comment".to_string()),
                                token_modifiers: vec!["documentation".to_string()],
                                ..Default::default()
                            }],
                        }),
                        ..Default::default()
                    });
                });
            });
        });

        // Re-set the buffer state to trigger a refetch under the new rules.
        cx.set_state("ˇ/// d\n// n\n");
        full_request.next().await;
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx).len(),
            1,
            "Normal comment should still be highlighted (matched by default rule)"
        );
    }
2090
2091 fn extract_semantic_highlight_styles(
2092 editor: &Entity<Editor>,
2093 cx: &TestAppContext,
2094 ) -> Vec<HighlightStyle> {
2095 editor.read_with(cx, |editor, cx| {
2096 editor
2097 .display_map
2098 .read(cx)
2099 .semantic_token_highlights
2100 .iter()
2101 .flat_map(|(_, (v, interner))| {
2102 v.iter().map(|highlights| interner[highlights.style])
2103 })
2104 .collect()
2105 })
2106 }
2107}