1use std::{collections::hash_map, sync::Arc, time::Duration};
2
3use collections::{HashMap, HashSet};
4use futures::future::join_all;
5use gpui::{
6 App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
7};
8use itertools::Itertools;
9use language::language_settings::language_settings;
10use project::{
11 lsp_store::{
12 BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
13 TokenType,
14 },
15 project_settings::ProjectSettings,
16};
17use settings::{
18 SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
19 SemanticTokenRules, Settings as _,
20};
21use text::BufferId;
22use theme::SyntaxTheme;
23use ui::ActiveTheme as _;
24
25use crate::{
26 Editor,
27 actions::ToggleSemanticHighlights,
28 display_map::{HighlightStyleInterner, SemanticTokenHighlight},
29};
30
/// Per-editor state backing LSP semantic-token highlighting.
pub(super) struct SemanticTokenState {
    /// Semantic-token styling rules snapshotted from the project's global LSP settings.
    rules: SemanticTokenRules,
    /// Whether semantic highlighting is currently enabled for this editor.
    enabled: bool,
    /// The in-flight debounced fetch/apply task; replaced wholesale on every refresh,
    /// which cancels any previous pending update.
    update_task: Task<()>,
    /// The buffer version at which tokens were last fetched, per buffer; used to
    /// skip re-querying buffers that have not changed since the last fetch.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
37
38impl SemanticTokenState {
39 pub(super) fn new(cx: &App, enabled: bool) -> Self {
40 Self {
41 rules: ProjectSettings::get_global(cx)
42 .global_lsp_settings
43 .semantic_token_rules
44 .clone(),
45 enabled,
46 update_task: Task::ready(()),
47 fetched_for_buffers: HashMap::default(),
48 }
49 }
50
51 pub(super) fn enabled(&self) -> bool {
52 self.enabled
53 }
54
55 pub(super) fn toggle_enabled(&mut self) {
56 self.enabled = !self.enabled;
57 }
58
59 #[cfg(test)]
60 pub(super) fn take_update_task(&mut self) -> Task<()> {
61 std::mem::replace(&mut self.update_task, Task::ready(()))
62 }
63
64 pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
65 self.fetched_for_buffers.remove(buffer_id);
66 }
67
68 pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
69 if new_rules != self.rules {
70 self.rules = new_rules;
71 true
72 } else {
73 false
74 }
75 }
76}
77
impl Editor {
    /// Returns true if any buffer in this editor's multi-buffer has a
    /// semantics provider that supports semantic tokens.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        // OR together support across every constituent buffer.
        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(&mut |buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlighting is currently enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: flips semantic highlighting on/off, drops all cached
    /// fetch versions, and kicks off a refresh (which will either re-fetch
    /// tokens or clear the existing highlights, depending on the new state).
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.invalidate_semantic_tokens(None);
        self.refresh_semantic_tokens(None, None, cx);
    }

    /// Forgets the cached fetch version for one buffer, or for all buffers
    /// when `for_buffer` is `None`, so the next refresh re-queries them.
    pub(super) fn invalidate_semantic_tokens(&mut self, for_buffer: Option<BufferId>) {
        match for_buffer {
            Some(for_buffer) => self.semantic_token_state.invalidate_buffer(&for_buffer),
            None => self.semantic_token_state.fetched_for_buffers.clear(),
        }
    }

    /// Debounced re-fetch of semantic tokens for the visible buffers (plus
    /// `buffer_id`, if given), converting the results into display-map
    /// highlights. When `for_server` is set, all previously fetched buffers
    /// are re-queried and their stale highlights invalidated.
    pub(super) fn refresh_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        // Disabled (or non-full editor mode): drop cached state, clear all
        // existing highlights, and cancel any in-flight update task.
        if !self.mode().is_full() || !self.semantic_token_state.enabled() {
            self.invalidate_semantic_tokens(None);
            self.display_map.update(cx, |display_map, _| {
                // Clear in place when we hold the only Arc reference;
                // otherwise swap in a fresh empty map.
                match Arc::get_mut(&mut display_map.semantic_token_highlights) {
                    Some(highlights) => highlights.clear(),
                    None => display_map.semantic_token_highlights = Arc::new(Default::default()),
                };
            });
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-initiated refresh drops every cached fetch version so all
        // buffers get re-queried; their old highlights are invalidated after
        // the new results arrive (below).
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Candidate buffers: everything visible, plus the explicitly
        // requested buffer, filtered to registered buffers whose language
        // settings have semantic tokens enabled.
        let buffers_to_query = self
            .visible_excerpts(true, cx)
            .into_values()
            .map(|(buffer, ..)| buffer)
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && language_settings(
                        editor_buffer.read(cx).language().map(|l| l.name()),
                        editor_buffer.read(cx).file(),
                        cx,
                    )
                    .semantic_tokens
                    .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Buffers that still have highlights but are no longer queried AND
        // have semantic tokens disabled in settings: drop their cached state
        // and highlights immediately.
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .keys()
            .copied()
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        // Replacing `update_task` cancels any previous pending refresh.
        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            // Debounce rapid-fire refresh requests (typing, scrolling).
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            // Start one fetch task per buffer whose version changed since the
            // last successful fetch; unchanged buffers are skipped entirely.
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                sema.semantic_tokens(buffer, for_server, cx).map(
                                    |task| async move { (buffer_id, query_version, task.await) },
                                )
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // Now that fresh results are in hand, drop the highlights
                    // that a server-initiated refresh marked as stale.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                            editor.semantic_token_state.invalidate_buffer(&buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // A successful response with no tokens clears any
                            // highlights previously shown for this buffer.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                    {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Record the fetched version; discard results that are
                        // not newer than what we already applied (a concurrent
                        // refresh may have raced ahead of this one).
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        // Convert every server's tokens into display-map
                        // highlights, interning styles so duplicates share one
                        // entry, then store them sorted by start position.
                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    // NOTE(review): reserves 2x the token count;
                                    // presumably an upper bound on highlights per
                                    // token — confirm against the conversion.
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        cx,
                                    ));
                                }

                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                // Copy-on-write insert keeps readers of the old
                                // Arc'd map unaffected until they re-read.
                                Arc::make_mut(&mut display_map.semantic_token_highlights).insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }
}
329
/// Converts one server's buffer-local semantic tokens into multi-buffer
/// highlights, resolving each token's anchors against the snapshot, styling it
/// via [`convert_token`], and interning the resulting style.
///
/// Tokens whose start or end anchor is not visible in the multi-buffer, or for
/// which no styling rule matches, are dropped.
fn buffer_into_editor_highlights<'a, 'b>(
    buffer_tokens: &'a [BufferSemanticToken],
    stylizer: &'a SemanticTokenStylizer,
    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
    interner: &'b mut HighlightStyleInterner,
    cx: &'a App,
) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
    multi_buffer_snapshot
        // Flatten each token into (start, end) anchors, resolve them in one
        // batch, then re-pair them with `tuples` and zip back to the tokens —
        // the ordering guarantees each pair lines up with its source token.
        .text_anchors_to_visible_anchors(
            buffer_tokens
                .iter()
                .flat_map(|token| [token.range.start, token.range.end]),
        )
        .into_iter()
        .tuples::<(_, _)>()
        .zip(buffer_tokens)
        .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
            // Skip tokens whose endpoints fall outside the visible excerpts.
            let range = multi_buffer_start?..multi_buffer_end?;
            let style = convert_token(
                stylizer,
                cx.theme().syntax(),
                token.token_type,
                token.token_modifiers,
            )?;
            let style = interner.intern(style);
            Some(SemanticTokenHighlight {
                range,
                style,
                token_type: token.token_type,
                token_modifiers: token.token_modifiers,
                server_id: stylizer.server_id(),
            })
        })
}
364
/// Resolves the highlight style for a single semantic token by folding every
/// matching rule, in order, into one [`HighlightStyle`].
///
/// A rule matches when all of its required modifiers are set on the token's
/// modifier bitset. For each styled attribute, precedence within a rule is:
/// the rule's explicit value, then the first theme style the rule names, then
/// whatever an earlier rule already set. Returns `None` when no rule matches
/// at all (note: a matching rule that sets nothing still yields a default,
/// empty style — presumably intentional, as a way to "claim" the token).
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    let rules = stylizer.rules_for_token(token_type)?;
    // Keep only rules whose every required modifier is present on the token.
    let matching = rules.iter().filter(|rule| {
        rule.token_modifiers
            .iter()
            .all(|m| stylizer.has_modifier(modifiers, m))
    });

    let mut highlight = HighlightStyle::default();
    let mut empty = true;

    for rule in matching {
        empty = false;

        // First theme style named by the rule that the theme actually defines.
        let style = rule.style.iter().find_map(|style| theme.get_opt(style));

        // Overwrites one field of `highlight` with, in priority order: the
        // rule's explicit value (transformed), the theme style's value, or
        // the previously accumulated value.
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        // Underline/strikethrough colors may inherit the foreground computed
        // so far (this rule's own color overwrite has already run above).
        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }

    if empty { None } else { Some(highlight) }
}
458
459#[cfg(test)]
460mod tests {
461 use std::{
462 ops::Range,
463 sync::atomic::{self, AtomicUsize},
464 };
465
466 use futures::StreamExt as _;
467 use gpui::{AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext};
468 use language::{Language, LanguageConfig, LanguageMatcher};
469 use languages::FakeLspAdapter;
470 use multi_buffer::{
471 AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
472 };
473 use project::Project;
474 use rope::Point;
475 use serde_json::json;
476 use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore};
477 use workspace::{MultiWorkspace, WorkspaceHandle as _};
478
479 use crate::{
480 Capability,
481 editor_tests::{init_test, update_test_language_settings},
482 test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
483 };
484
485 use super::*;
486
    // A server that advertises full tokens without delta support must receive
    // a plain `semanticTokens/full` request for every change — even when it
    // returns a result ID — and the resulting token must be highlighted.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust via language settings.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Server capabilities: full tokens, `delta: None` (no delta support).
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        // Always answer with one "function" token covering columns 3..7.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // Second edit must trigger a second *full* request (never a delta).
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // `main` (offsets 3..7) should be highlighted.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
564
    // Even when the server advertises delta support, responding with
    // `result_id: None` must force the client back to plain full requests.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust via language settings.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Server capabilities: full tokens with `delta: Some(true)`.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        // One "function" token at columns 3..7, but never a result ID.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Await the editor's debounced update task so highlights are applied.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Second edit: still a full request, since no result ID was stored.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
640
    // With delta support advertised and a result ID returned, the second fetch
    // must be a `semanticTokens/full/delta` request carrying that result ID.
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust via language settings.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Server capabilities: full tokens with delta support enabled.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Count full and delta requests separately.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        // Full handler: one "function" token plus result ID "a".
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        // Delta handler: must receive previous result ID "a"; returns an
        // empty edit list (tokens unchanged) and new result ID "b".
        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Second edit must go through the delta endpoint.
        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Highlights from the initial full response still apply unchanged.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
736
    // Two language servers attached to the same TOML buffer must each be
    // queried once, and their token sets must be merged into the editor's
    // highlights (one highlight from each server).
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for TOML via language settings.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test.
        // Each server declares a different single-entry legend.
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        // Per-server full-request counters.
        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        // First fake server: highlights the first character of line 0.
        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        // Second fake server: highlights the character at column 4 of line 0.
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight '3' as a literal
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        // Fake filesystem with a single TOML file.
        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        // Build a workspace, open the TOML file, and focus its editor.
        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait for both fake servers to start.
        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        // Advance past the refresh debounce, then drain the update task.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // One highlight from each server, merged and sorted by position.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
956
957 #[gpui::test]
958 async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
959 init_test(cx, |_| {});
960
961 update_test_language_settings(cx, &|language_settings| {
962 language_settings.languages.0.insert(
963 "TOML".into(),
964 LanguageSettingsContent {
965 semantic_tokens: Some(SemanticTokens::Full),
966 ..LanguageSettingsContent::default()
967 },
968 );
969 language_settings.languages.0.insert(
970 "Rust".into(),
971 LanguageSettingsContent {
972 semantic_tokens: Some(SemanticTokens::Full),
973 ..LanguageSettingsContent::default()
974 },
975 );
976 });
977
978 let toml_language = Arc::new(Language::new(
979 LanguageConfig {
980 name: "TOML".into(),
981 matcher: LanguageMatcher {
982 path_suffixes: vec!["toml".into()],
983 ..LanguageMatcher::default()
984 },
985 ..LanguageConfig::default()
986 },
987 None,
988 ));
989 let rust_language = Arc::new(Language::new(
990 LanguageConfig {
991 name: "Rust".into(),
992 matcher: LanguageMatcher {
993 path_suffixes: vec!["rs".into()],
994 ..LanguageMatcher::default()
995 },
996 ..LanguageConfig::default()
997 },
998 None,
999 ));
1000
1001 let toml_legend = lsp::SemanticTokensLegend {
1002 token_types: vec!["property".into()],
1003 token_modifiers: Vec::new(),
1004 };
1005 let rust_legend = lsp::SemanticTokensLegend {
1006 token_types: vec!["constant".into()],
1007 token_modifiers: Vec::new(),
1008 };
1009
1010 let app_state = cx.update(workspace::AppState::test);
1011
1012 cx.update(|cx| {
1013 assets::Assets.load_test_fonts(cx);
1014 crate::init(cx);
1015 workspace::init(app_state.clone(), cx);
1016 });
1017
1018 let project = Project::test(app_state.fs.clone(), [], cx).await;
1019 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1020 let full_counter_toml = Arc::new(AtomicUsize::new(0));
1021 let full_counter_toml_clone = full_counter_toml.clone();
1022
1023 let mut toml_server = language_registry.register_fake_lsp(
1024 toml_language.name(),
1025 FakeLspAdapter {
1026 name: "toml",
1027 capabilities: lsp::ServerCapabilities {
1028 semantic_tokens_provider: Some(
1029 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1030 lsp::SemanticTokensOptions {
1031 legend: toml_legend,
1032 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1033 ..lsp::SemanticTokensOptions::default()
1034 },
1035 ),
1036 ),
1037 ..lsp::ServerCapabilities::default()
1038 },
1039 initializer: Some(Box::new({
1040 let full_counter_toml_clone = full_counter_toml_clone.clone();
1041 move |fake_server| {
1042 let full_counter = full_counter_toml_clone.clone();
1043 fake_server
1044 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1045 move |_, _| {
1046 full_counter.fetch_add(1, atomic::Ordering::Release);
1047 async move {
1048 Ok(Some(lsp::SemanticTokensResult::Tokens(
1049 lsp::SemanticTokens {
1050 // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
1051 data: vec![
1052 0, // delta_line (line 0)
1053 0, // delta_start
1054 1, // length
1055 0, // token_type
1056 0, // token_modifiers_bitset
1057 1, // delta_line (line 1)
1058 0, // delta_start
1059 1, // length
1060 0, // token_type
1061 0, // token_modifiers_bitset
1062 1, // delta_line (line 2)
1063 0, // delta_start
1064 1, // length
1065 0, // token_type
1066 0, // token_modifiers_bitset
1067 ],
1068 result_id: Some("a".into()),
1069 },
1070 )))
1071 }
1072 },
1073 );
1074 }
1075 })),
1076 ..FakeLspAdapter::default()
1077 },
1078 );
1079 language_registry.add(toml_language.clone());
1080 let mut rust_server = language_registry.register_fake_lsp(
1081 rust_language.name(),
1082 FakeLspAdapter {
1083 name: "rust",
1084 capabilities: lsp::ServerCapabilities {
1085 semantic_tokens_provider: Some(
1086 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1087 lsp::SemanticTokensOptions {
1088 legend: rust_legend,
1089 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1090 ..lsp::SemanticTokensOptions::default()
1091 },
1092 ),
1093 ),
1094 ..lsp::ServerCapabilities::default()
1095 },
1096 ..FakeLspAdapter::default()
1097 },
1098 );
1099 language_registry.add(rust_language.clone());
1100
1101 app_state
1102 .fs
1103 .as_fake()
1104 .insert_tree(
1105 EditorLspTestContext::root_path(),
1106 json!({
1107 ".git": {},
1108 "dir": {
1109 "foo.toml": "a = 1\nb = 2\nc = 3\n",
1110 "bar.rs": "const c: usize = 3;\n",
1111 }
1112 }),
1113 )
1114 .await;
1115
1116 let (multi_workspace, cx) =
1117 cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
1118 let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
1119 project
1120 .update(cx, |project, cx| {
1121 project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
1122 })
1123 .await
1124 .unwrap();
1125 cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
1126 .await;
1127
1128 let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
1129 let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
1130 let (toml_item, rust_item) = workspace.update_in(cx, |workspace, window, cx| {
1131 (
1132 workspace.open_path(toml_file, None, true, window, cx),
1133 workspace.open_path(rust_file, None, true, window, cx),
1134 )
1135 });
1136 let toml_item = toml_item.await.expect("Could not open test file");
1137 let rust_item = rust_item.await.expect("Could not open test file");
1138
1139 let (toml_editor, rust_editor) = cx.update(|_, cx| {
1140 (
1141 toml_item
1142 .act_as::<Editor>(cx)
1143 .expect("Opened test file wasn't an editor"),
1144 rust_item
1145 .act_as::<Editor>(cx)
1146 .expect("Opened test file wasn't an editor"),
1147 )
1148 });
1149 let toml_buffer = cx.read(|cx| {
1150 toml_editor
1151 .read(cx)
1152 .buffer()
1153 .read(cx)
1154 .as_singleton()
1155 .unwrap()
1156 });
1157 let rust_buffer = cx.read(|cx| {
1158 rust_editor
1159 .read(cx)
1160 .buffer()
1161 .read(cx)
1162 .as_singleton()
1163 .unwrap()
1164 });
1165 let multibuffer = cx.new(|cx| {
1166 let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
1167 multibuffer.set_excerpts_for_path(
1168 PathKey::sorted(0),
1169 toml_buffer.clone(),
1170 [Point::new(0, 0)..Point::new(0, 4)],
1171 0,
1172 cx,
1173 );
1174 multibuffer.set_excerpts_for_path(
1175 PathKey::sorted(1),
1176 rust_buffer.clone(),
1177 [Point::new(0, 0)..Point::new(0, 4)],
1178 0,
1179 cx,
1180 );
1181 multibuffer
1182 });
1183
1184 let editor = workspace.update_in(cx, |workspace, window, cx| {
1185 let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
1186 workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
1187 editor
1188 });
1189 editor.update_in(cx, |editor, window, cx| {
1190 let nav_history = workspace
1191 .read(cx)
1192 .active_pane()
1193 .read(cx)
1194 .nav_history_for_item(&cx.entity());
1195 editor.set_nav_history(Some(nav_history));
1196 window.focus(&editor.focus_handle(cx), cx)
1197 });
1198
1199 let _toml_server = toml_server.next().await.unwrap();
1200 let _rust_server = rust_server.next().await.unwrap();
1201
1202 // Initial request.
1203 cx.executor().advance_clock(Duration::from_millis(200));
1204 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1205 cx.run_until_parked();
1206 task.await;
1207 assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
1208 cx.run_until_parked();
1209
1210 // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
1211 // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
1212 assert_eq!(
1213 extract_semantic_highlights(&editor, &cx),
1214 vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
1215 );
1216
1217 // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
1218 let toml_excerpt_id =
1219 editor.read_with(cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]);
1220 editor.update_in(cx, |editor, _, cx| {
1221 editor.buffer().update(cx, |buffer, cx| {
1222 buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx);
1223 });
1224 });
1225
1226 // Wait for semantic tokens to be re-fetched after expansion.
1227 cx.executor().advance_clock(Duration::from_millis(200));
1228 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1229 cx.run_until_parked();
1230 task.await;
1231
1232 // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
1233 // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
1234 // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
1235 assert_eq!(
1236 extract_semantic_highlights(&editor, &cx),
1237 vec![
1238 MultiBufferOffset(0)..MultiBufferOffset(1),
1239 MultiBufferOffset(6)..MultiBufferOffset(7),
1240 MultiBufferOffset(12)..MultiBufferOffset(13),
1241 ]
1242 );
1243 }
1244
1245 fn extract_semantic_highlights(
1246 editor: &Entity<Editor>,
1247 cx: &TestAppContext,
1248 ) -> Vec<Range<MultiBufferOffset>> {
1249 editor.read_with(cx, |editor, cx| {
1250 let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1251 editor
1252 .display_map
1253 .read(cx)
1254 .semantic_token_highlights
1255 .iter()
1256 .flat_map(|(_, (v, _))| v.iter())
1257 .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1258 .collect()
1259 })
1260 }
1261
    // Verifies that editing `semantic_token_rules` in the global LSP settings
    // restyles semantic tokens: after the settings change and a subsequent
    // refetch, the highlight carries the configured foreground color.
    #[gpui::test]
    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server whose legend exposes a single token type, "function"
        // (so token_type index 0 below means "function").
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Every full-tokens request returns one 4-char token at line 0, col 3
        // (covers `main` in "fn main() {}" — LSP relative encoding).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type (function)
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None,
                            },
                        )))
                    }
                },
            );

        // Trigger initial semantic tokens fetch
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Verify initial highlights exist (with no custom color yet)
        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
        assert_eq!(
            initial_ranges,
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Should have initial semantic token highlights"
        );
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Initial color should be None or theme default (not red or blue)
        let initial_color = initial_styles[0].color;

        // Set a custom foreground color for function tokens via settings.json
        let red_color = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        };
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: Vec::from([SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                foreground_color: Some(red_color),
                                ..SemanticTokenRule::default()
                            }]),
                        }),
                        ..GlobalLspSettingsContent::default()
                    });
                });
            });
        });

        // Trigger a refetch by making an edit (which forces semantic tokens update)
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        // Verify the highlights now have the custom red color
        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(
            styles_after_settings_change.len(),
            1,
            "Should still have one highlight"
        );
        assert_eq!(
            styles_after_settings_change[0].color,
            Some(Hsla::from(red_color)),
            "Highlight should have the custom red color from settings.json"
        );
        assert_ne!(
            styles_after_settings_change[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1382
    // Verifies that `experimental_theme_overrides` restyles existing semantic
    // tokens without an LSP refetch: applying an override recolors the
    // highlight, changing it recolors again, and clearing it reverts to the
    // original theme color.
    #[gpui::test]
    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server whose legend exposes only the "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One 4-char "function" token at line 0, col 3 — covers `main`.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Initial fetch establishes the baseline style from the unmodified theme.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Changing experimental_theme_overrides triggers GlobalTheme reload,
        // which fires theme_changed → refresh_semantic_token_highlights.
        let red_color: Hsla = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#ff0000".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        // Let any debounced refresh settle before inspecting the styles.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(red_color),
            "Highlight should have red color from theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );

        // Changing the override to a different color also restyles.
        let blue_color: Hsla = Rgba {
            r: 0.0,
            g: 0.0,
            b: 1.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#0000ff".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_second_override.len(), 1);
        assert_eq!(
            styles_after_second_override[0].color,
            Some(blue_color),
            "Highlight should have blue color from updated theme override"
        );

        // Removing overrides reverts to the original theme color.
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = None;
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_clear.len(), 1);
        assert_eq!(
            styles_after_clear[0].color, initial_color,
            "Highlight should revert to initial color after clearing overrides"
        );
    }
1547
    // Verifies that per-theme overrides (`theme_overrides` keyed by the active
    // theme's name) restyle existing semantic token highlights, just like the
    // global `experimental_theme_overrides` do.
    #[gpui::test]
    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};
        use ui::ActiveTheme as _;

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server whose legend exposes only the "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One 4-char "function" token at line 0, col 3 — covers `main`.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Initial fetch establishes the baseline style from the unmodified theme.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Per-theme overrides (theme_overrides keyed by theme name) also go through
        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
        let green_color: Hsla = Rgba {
            r: 0.0,
            g: 1.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
                        theme_name.clone(),
                        ThemeStyleContent {
                            syntax: IndexMap::from_iter([(
                                "function".to_string(),
                                HighlightStyleContent {
                                    color: Some("#00ff00".to_string()),
                                    background_color: None,
                                    font_style: None,
                                    font_weight: None,
                                },
                            )]),
                            ..ThemeStyleContent::default()
                        },
                    )]);
                });
            });
        });

        // Let any debounced refresh settle before inspecting the styles.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(green_color),
            "Highlight should have green color from per-theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1660
    // Verifies that stopping the language server for a buffer removes its
    // semantic token highlights instead of leaving stale ones behind.
    #[gpui::test]
    async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server whose legend exposes only the "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One 4-char token at line 0, col 3 — covers `main`.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before stopping the server"
        );

        // Stop every language server attached to the editor's buffers.
        cx.update_editor(|editor, _, cx| {
            let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
            editor.project.as_ref().unwrap().update(cx, |project, cx| {
                project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
            })
        });
        // Let any debounced cleanup settle before inspecting the highlights.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after stopping the server"
        );
    }
1738
    // Verifies that flipping the language's `semantic_tokens` setting from
    // Full to Off clears existing semantic token highlights.
    #[gpui::test]
    async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server whose legend exposes only the "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One 4-char token at line 0, col 3 — covers `main`.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before disabling the setting"
        );

        // Turn the feature off for Rust; this should drop the highlights.
        update_test_language_settings(&mut cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Off),
                    ..LanguageSettingsContent::default()
                },
            );
        });
        // Let any debounced cleanup settle before inspecting the highlights.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after disabling the setting"
        );
    }
1819
1820 fn extract_semantic_highlight_styles(
1821 editor: &Entity<Editor>,
1822 cx: &TestAppContext,
1823 ) -> Vec<HighlightStyle> {
1824 editor.read_with(cx, |editor, cx| {
1825 editor
1826 .display_map
1827 .read(cx)
1828 .semantic_token_highlights
1829 .iter()
1830 .flat_map(|(_, (v, interner))| {
1831 v.iter().map(|highlights| interner[highlights.style])
1832 })
1833 .collect()
1834 })
1835 }
1836}