1use std::{collections::hash_map, sync::Arc, time::Duration};
2
3use collections::{HashMap, HashSet};
4use futures::future::join_all;
5use gpui::{
6 App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
7};
8use itertools::Itertools;
9use language::language_settings::LanguageSettings;
10use project::{
11 lsp_store::{
12 BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
13 TokenType,
14 },
15 project_settings::ProjectSettings,
16};
17use settings::{
18 SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
19 SemanticTokenRules, Settings as _,
20};
21use text::BufferId;
22use theme::SyntaxTheme;
23use ui::ActiveTheme as _;
24
25use crate::{
26 Editor,
27 actions::ToggleSemanticHighlights,
28 display_map::{HighlightStyleInterner, SemanticTokenHighlight},
29};
30
/// Per-editor state backing LSP semantic-token highlighting.
pub(super) struct SemanticTokenState {
    /// Semantic-token styling rules taken from the project's global LSP settings.
    rules: SemanticTokenRules,
    /// Whether semantic highlighting is currently enabled for this editor.
    enabled: bool,
    /// In-flight (debounced) task that fetches tokens and applies highlights.
    update_task: Task<()>,
    /// The buffer version at which tokens were last fetched, per buffer;
    /// used to skip redundant requests when a buffer hasn't changed since.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
37
38impl SemanticTokenState {
39 pub(super) fn new(cx: &App, enabled: bool) -> Self {
40 Self {
41 rules: ProjectSettings::get_global(cx)
42 .global_lsp_settings
43 .semantic_token_rules
44 .clone(),
45 enabled,
46 update_task: Task::ready(()),
47 fetched_for_buffers: HashMap::default(),
48 }
49 }
50
51 pub(super) fn enabled(&self) -> bool {
52 self.enabled
53 }
54
55 pub(super) fn toggle_enabled(&mut self) {
56 self.enabled = !self.enabled;
57 }
58
59 #[cfg(test)]
60 pub(super) fn take_update_task(&mut self) -> Task<()> {
61 std::mem::replace(&mut self.update_task, Task::ready(()))
62 }
63
64 pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
65 self.fetched_for_buffers.remove(buffer_id);
66 }
67
68 pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
69 if new_rules != self.rules {
70 self.rules = new_rules;
71 true
72 } else {
73 false
74 }
75 }
76}
77
impl Editor {
    /// Returns `true` if any buffer in this editor's multi-buffer has a
    /// semantics provider that supports semantic tokens.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        // OR together support across every buffer in the multi-buffer.
        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(&mut |buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlighting is currently enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: flips semantic highlighting on/off, drops all cached
    /// token state, and kicks off a refresh (which also clears highlights
    /// when the feature was just disabled).
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.invalidate_semantic_tokens(None);
        self.refresh_semantic_tokens(None, None, cx);
    }

    /// Forgets fetched-token versions for one buffer, or for all buffers when
    /// `for_buffer` is `None`, so the next refresh refetches them.
    pub(super) fn invalidate_semantic_tokens(&mut self, for_buffer: Option<BufferId>) {
        match for_buffer {
            Some(for_buffer) => self.semantic_token_state.invalidate_buffer(&for_buffer),
            None => self.semantic_token_state.fetched_for_buffers.clear(),
        }
    }

    /// Fetches semantic tokens for the visible buffers (plus `buffer_id`, if
    /// given) and applies them as display-map highlights.
    ///
    /// When semantic highlighting is disabled, all cached state and highlights
    /// are cleared instead. When `for_server` is set, previously fetched state
    /// is drained so that server's buffers are re-highlighted from scratch.
    /// The actual fetch runs in a debounced background task.
    pub(super) fn refresh_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        if !self.lsp_data_enabled() || !self.semantic_token_state.enabled() {
            // Feature is off: wipe cached versions and all highlights.
            self.invalidate_semantic_tokens(None);
            self.display_map.update(cx, |display_map, _| {
                // Clear in place when we hold the only Arc; otherwise swap in
                // a fresh empty map.
                match Arc::get_mut(&mut display_map.semantic_token_highlights) {
                    Some(highlights) => highlights.clear(),
                    None => display_map.semantic_token_highlights = Arc::new(Default::default()),
                };
            });
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-specific refresh invalidates everything previously fetched;
        // the affected buffers get their highlights dropped after the new
        // results arrive (below), avoiding a flash of unhighlighted text.
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Buffers we will query: the visible excerpts plus the explicitly
        // requested buffer, restricted to registered buffers whose language
        // settings enable semantic tokens.
        let buffers_to_query = self
            .visible_excerpts(true, cx)
            .into_values()
            .map(|(buffer, ..)| buffer)
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && LanguageSettings::for_buffer(editor_buffer.read(cx), cx)
                        .semantic_tokens
                        .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Drop highlights for buffers that still have highlights but are no
        // longer queried and have semantic tokens disabled in settings.
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .keys()
            .copied()
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        LanguageSettings::for_buffer(&buffer, cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        // Replacing `update_task` cancels any previously scheduled refresh.
        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            // Debounce rapid successive refreshes (e.g. while typing).
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            // Skip buffers that haven't changed since the last fetch.
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                sema.semantic_tokens(buffer, for_server, cx).map(
                                    |task| async move { (buffer_id, query_version, task.await) },
                                )
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // Now that fresh results are in hand, drop the highlights
                    // that were scheduled for invalidation above.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                            editor.semantic_token_state.invalidate_buffer(&buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // `None` tokens means no data for this buffer:
                            // drop its existing highlights.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                    {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Record the fetched version; bail if a newer fetch
                        // already landed for this buffer.
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        // Convert tokens from every server into highlight
                        // entries, then store them sorted by start position.
                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        cx,
                                    ));
                                }

                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                Arc::make_mut(&mut display_map.semantic_token_highlights).insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }
}
325
/// Converts one server's buffer-local semantic tokens into multi-buffer
/// highlight entries, resolving each token's anchors against the snapshot and
/// interning its computed style.
///
/// Tokens whose start or end anchor is not visible in the multi-buffer, or
/// whose type/modifiers match no styling rule, are skipped.
fn buffer_into_editor_highlights<'a, 'b>(
    buffer_tokens: &'a [BufferSemanticToken],
    stylizer: &'a SemanticTokenStylizer,
    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
    interner: &'b mut HighlightStyleInterner,
    cx: &'a App,
) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
    multi_buffer_snapshot
        // Flatten each token into its (start, end) anchors so they can be
        // resolved in a single batch...
        .text_anchors_to_visible_anchors(
            buffer_tokens
                .iter()
                .flat_map(|token| [token.range.start, token.range.end]),
        )
        .into_iter()
        // ...then re-pair the resolved anchors and line them back up with
        // their originating tokens.
        .tuples::<(_, _)>()
        .zip(buffer_tokens)
        .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
            // `?` drops tokens whose anchors fall outside visible excerpts.
            let range = multi_buffer_start?..multi_buffer_end?;
            let style = convert_token(
                stylizer,
                cx.theme().syntax(),
                token.token_type,
                token.token_modifiers,
            )?;
            let style = interner.intern(style);
            Some(SemanticTokenHighlight {
                range,
                style,
                token_type: token.token_type,
                token_modifiers: token.token_modifiers,
                server_id: stylizer.server_id(),
            })
        })
}
360
/// Computes the highlight style for a semantic token by folding together all
/// styling rules that apply to its type and modifiers.
///
/// Returns `None` when no rule exists for `token_type` or none of its rules
/// match `modifiers`. Rules are applied in order; for each field, a later
/// matching rule's explicit value wins, falling back to the rule's theme
/// style, then to whatever an earlier rule already set.
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    let rules = stylizer.rules_for_token(token_type)?;
    // A rule matches when every modifier it requires is present on the token.
    let matching = rules.iter().filter(|rule| {
        rule.token_modifiers
            .iter()
            .all(|m| stylizer.has_modifier(modifiers, m))
    });

    let mut highlight = HighlightStyle::default();
    let mut empty = true;

    for rule in matching {
        empty = false;

        // First theme style named by the rule that the theme actually defines.
        let style = rule.style.iter().find_map(|style| theme.get_opt(style));

        // Per-field precedence: explicit rule value > theme style value >
        // value accumulated from earlier rules.
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        // Underline/strikethrough colors may inherit the foreground color
        // accumulated so far, be replaced, or be left unset.
        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }

    // No matching rule at all means "no semantic style" rather than a default style.
    if empty { None } else { Some(highlight) }
}
454
455#[cfg(test)]
456mod tests {
457 use std::{
458 ops::Range,
459 sync::atomic::{self, AtomicUsize},
460 };
461
462 use futures::StreamExt as _;
463 use gpui::{AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext};
464 use language::{Language, LanguageConfig, LanguageMatcher};
465 use languages::FakeLspAdapter;
466 use multi_buffer::{
467 AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
468 };
469 use project::Project;
470 use rope::Point;
471 use serde_json::json;
472 use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore};
473 use workspace::{MultiWorkspace, WorkspaceHandle as _};
474
475 use crate::{
476 Capability,
477 editor_tests::{init_test, update_test_language_settings},
478 test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
479 };
480
481 use super::*;
482
    // A server that advertises `full` support but `delta: None` must always be
    // queried with full requests, even if it returns a `result_id`.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // Deltas explicitly not supported.
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token at columns 3..7.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // A second edit must trigger another *full* request.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Both requests were full requests; no deltas happened.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
560
    // Even if a server supports deltas, responding without a `result_id`
    // leaves the client nothing to base a delta on, so the next request must
    // be a full request again.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // Deltas supported by the server...
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token at columns 3..7.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Await the debounced update task so highlights are applied.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        // Two full requests, zero delta requests.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
636
    // When the server supports deltas and returns a `result_id`, the second
    // fetch should go through the delta request with the previous result ID.
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // Deltas supported.
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token at columns 3..7.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    // The client must send back the full response's result ID.
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                // No edits: the previous tokens still apply.
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Second fetch goes through the delta path.
        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Exactly one full and one delta request happened.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
732
    // Two language servers for the same buffer each contribute their own
    // semantic tokens; the editor should merge highlights from both.
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for TOML buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test.
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        // Separate request counters per fake server.
        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight '3' as a number (token type 0 in this server's legend)
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait for both fake servers to start.
        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        // Advance past the refresh debounce, then await the update task.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // One highlight from each server, merged and sorted by position.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        // Each server was queried exactly once.
        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
952
953 #[gpui::test]
954 async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
955 init_test(cx, |_| {});
956
957 update_test_language_settings(cx, &|language_settings| {
958 language_settings.languages.0.insert(
959 "TOML".into(),
960 LanguageSettingsContent {
961 semantic_tokens: Some(SemanticTokens::Full),
962 ..LanguageSettingsContent::default()
963 },
964 );
965 language_settings.languages.0.insert(
966 "Rust".into(),
967 LanguageSettingsContent {
968 semantic_tokens: Some(SemanticTokens::Full),
969 ..LanguageSettingsContent::default()
970 },
971 );
972 });
973
974 let toml_language = Arc::new(Language::new(
975 LanguageConfig {
976 name: "TOML".into(),
977 matcher: LanguageMatcher {
978 path_suffixes: vec!["toml".into()],
979 ..LanguageMatcher::default()
980 },
981 ..LanguageConfig::default()
982 },
983 None,
984 ));
985 let rust_language = Arc::new(Language::new(
986 LanguageConfig {
987 name: "Rust".into(),
988 matcher: LanguageMatcher {
989 path_suffixes: vec!["rs".into()],
990 ..LanguageMatcher::default()
991 },
992 ..LanguageConfig::default()
993 },
994 None,
995 ));
996
997 let toml_legend = lsp::SemanticTokensLegend {
998 token_types: vec!["property".into()],
999 token_modifiers: Vec::new(),
1000 };
1001 let rust_legend = lsp::SemanticTokensLegend {
1002 token_types: vec!["constant".into()],
1003 token_modifiers: Vec::new(),
1004 };
1005
1006 let app_state = cx.update(workspace::AppState::test);
1007
1008 cx.update(|cx| {
1009 assets::Assets.load_test_fonts(cx);
1010 crate::init(cx);
1011 workspace::init(app_state.clone(), cx);
1012 });
1013
1014 let project = Project::test(app_state.fs.clone(), [], cx).await;
1015 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1016 let full_counter_toml = Arc::new(AtomicUsize::new(0));
1017 let full_counter_toml_clone = full_counter_toml.clone();
1018
1019 let mut toml_server = language_registry.register_fake_lsp(
1020 toml_language.name(),
1021 FakeLspAdapter {
1022 name: "toml",
1023 capabilities: lsp::ServerCapabilities {
1024 semantic_tokens_provider: Some(
1025 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1026 lsp::SemanticTokensOptions {
1027 legend: toml_legend,
1028 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1029 ..lsp::SemanticTokensOptions::default()
1030 },
1031 ),
1032 ),
1033 ..lsp::ServerCapabilities::default()
1034 },
1035 initializer: Some(Box::new({
1036 let full_counter_toml_clone = full_counter_toml_clone.clone();
1037 move |fake_server| {
1038 let full_counter = full_counter_toml_clone.clone();
1039 fake_server
1040 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1041 move |_, _| {
1042 full_counter.fetch_add(1, atomic::Ordering::Release);
1043 async move {
1044 Ok(Some(lsp::SemanticTokensResult::Tokens(
1045 lsp::SemanticTokens {
1046 // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
1047 data: vec![
1048 0, // delta_line (line 0)
1049 0, // delta_start
1050 1, // length
1051 0, // token_type
1052 0, // token_modifiers_bitset
1053 1, // delta_line (line 1)
1054 0, // delta_start
1055 1, // length
1056 0, // token_type
1057 0, // token_modifiers_bitset
1058 1, // delta_line (line 2)
1059 0, // delta_start
1060 1, // length
1061 0, // token_type
1062 0, // token_modifiers_bitset
1063 ],
1064 result_id: Some("a".into()),
1065 },
1066 )))
1067 }
1068 },
1069 );
1070 }
1071 })),
1072 ..FakeLspAdapter::default()
1073 },
1074 );
1075 language_registry.add(toml_language.clone());
1076 let mut rust_server = language_registry.register_fake_lsp(
1077 rust_language.name(),
1078 FakeLspAdapter {
1079 name: "rust",
1080 capabilities: lsp::ServerCapabilities {
1081 semantic_tokens_provider: Some(
1082 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1083 lsp::SemanticTokensOptions {
1084 legend: rust_legend,
1085 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1086 ..lsp::SemanticTokensOptions::default()
1087 },
1088 ),
1089 ),
1090 ..lsp::ServerCapabilities::default()
1091 },
1092 ..FakeLspAdapter::default()
1093 },
1094 );
1095 language_registry.add(rust_language.clone());
1096
1097 app_state
1098 .fs
1099 .as_fake()
1100 .insert_tree(
1101 EditorLspTestContext::root_path(),
1102 json!({
1103 ".git": {},
1104 "dir": {
1105 "foo.toml": "a = 1\nb = 2\nc = 3\n",
1106 "bar.rs": "const c: usize = 3;\n",
1107 }
1108 }),
1109 )
1110 .await;
1111
1112 let (multi_workspace, cx) =
1113 cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
1114 let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
1115 project
1116 .update(cx, |project, cx| {
1117 project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
1118 })
1119 .await
1120 .unwrap();
1121 cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
1122 .await;
1123
1124 let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
1125 let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
1126 let (toml_item, rust_item) = workspace.update_in(cx, |workspace, window, cx| {
1127 (
1128 workspace.open_path(toml_file, None, true, window, cx),
1129 workspace.open_path(rust_file, None, true, window, cx),
1130 )
1131 });
1132 let toml_item = toml_item.await.expect("Could not open test file");
1133 let rust_item = rust_item.await.expect("Could not open test file");
1134
1135 let (toml_editor, rust_editor) = cx.update(|_, cx| {
1136 (
1137 toml_item
1138 .act_as::<Editor>(cx)
1139 .expect("Opened test file wasn't an editor"),
1140 rust_item
1141 .act_as::<Editor>(cx)
1142 .expect("Opened test file wasn't an editor"),
1143 )
1144 });
1145 let toml_buffer = cx.read(|cx| {
1146 toml_editor
1147 .read(cx)
1148 .buffer()
1149 .read(cx)
1150 .as_singleton()
1151 .unwrap()
1152 });
1153 let rust_buffer = cx.read(|cx| {
1154 rust_editor
1155 .read(cx)
1156 .buffer()
1157 .read(cx)
1158 .as_singleton()
1159 .unwrap()
1160 });
1161 let multibuffer = cx.new(|cx| {
1162 let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
1163 multibuffer.set_excerpts_for_path(
1164 PathKey::sorted(0),
1165 toml_buffer.clone(),
1166 [Point::new(0, 0)..Point::new(0, 4)],
1167 0,
1168 cx,
1169 );
1170 multibuffer.set_excerpts_for_path(
1171 PathKey::sorted(1),
1172 rust_buffer.clone(),
1173 [Point::new(0, 0)..Point::new(0, 4)],
1174 0,
1175 cx,
1176 );
1177 multibuffer
1178 });
1179
1180 let editor = workspace.update_in(cx, |workspace, window, cx| {
1181 let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
1182 workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
1183 editor
1184 });
1185 editor.update_in(cx, |editor, window, cx| {
1186 let nav_history = workspace
1187 .read(cx)
1188 .active_pane()
1189 .read(cx)
1190 .nav_history_for_item(&cx.entity());
1191 editor.set_nav_history(Some(nav_history));
1192 window.focus(&editor.focus_handle(cx), cx)
1193 });
1194
1195 let _toml_server = toml_server.next().await.unwrap();
1196 let _rust_server = rust_server.next().await.unwrap();
1197
1198 // Initial request.
1199 cx.executor().advance_clock(Duration::from_millis(200));
1200 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1201 cx.run_until_parked();
1202 task.await;
1203 assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
1204 cx.run_until_parked();
1205
1206 // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
1207 // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
1208 assert_eq!(
1209 extract_semantic_highlights(&editor, &cx),
1210 vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
1211 );
1212
1213 // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
1214 let toml_excerpt_id =
1215 editor.read_with(cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]);
1216 editor.update_in(cx, |editor, _, cx| {
1217 editor.buffer().update(cx, |buffer, cx| {
1218 buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx);
1219 });
1220 });
1221
1222 // Wait for semantic tokens to be re-fetched after expansion.
1223 cx.executor().advance_clock(Duration::from_millis(200));
1224 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1225 cx.run_until_parked();
1226 task.await;
1227
1228 // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
1229 // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
1230 // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
1231 assert_eq!(
1232 extract_semantic_highlights(&editor, &cx),
1233 vec![
1234 MultiBufferOffset(0)..MultiBufferOffset(1),
1235 MultiBufferOffset(6)..MultiBufferOffset(7),
1236 MultiBufferOffset(12)..MultiBufferOffset(13),
1237 ]
1238 );
1239 }
1240
1241 fn extract_semantic_highlights(
1242 editor: &Entity<Editor>,
1243 cx: &TestAppContext,
1244 ) -> Vec<Range<MultiBufferOffset>> {
1245 editor.read_with(cx, |editor, cx| {
1246 let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1247 editor
1248 .display_map
1249 .read(cx)
1250 .semantic_token_highlights
1251 .iter()
1252 .flat_map(|(_, (v, _))| v.iter())
1253 .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1254 .collect()
1255 })
1256 }
1257
// Verifies that editing `semantic_token_rules` in the global LSP settings
// restyles already-fetched semantic tokens: after the settings change and a
// refetch, the "function" token picks up the custom foreground color.
#[gpui::test]
async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
    use gpui::{Hsla, Rgba, UpdateGlobal as _};
    use settings::{GlobalLspSettingsContent, SemanticTokenRule};

    init_test(cx, |_| {});

    // Enable full semantic-token support for Rust buffers.
    update_test_language_settings(cx, &|language_settings| {
        language_settings.languages.0.insert(
            "Rust".into(),
            LanguageSettingsContent {
                semantic_tokens: Some(SemanticTokens::Full),
                ..LanguageSettingsContent::default()
            },
        );
    });

    // Fake server advertises a legend with a single "function" token type.
    let mut cx = EditorLspTestContext::new_rust(
        lsp::ServerCapabilities {
            semantic_tokens_provider: Some(
                lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                    lsp::SemanticTokensOptions {
                        legend: lsp::SemanticTokensLegend {
                            token_types: Vec::from(["function".into()]),
                            token_modifiers: Vec::new(),
                        },
                        full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                        ..lsp::SemanticTokensOptions::default()
                    },
                ),
            ),
            ..lsp::ServerCapabilities::default()
        },
        cx,
    )
    .await;

    // Every full request answers with one token: line 0, col 3, length 4
    // (i.e. the `main` identifier in "fn main() {}").
    let mut full_request = cx
        .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
            move |_, _, _| {
                async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                }
            },
        );

    // Trigger initial semantic tokens fetch
    cx.set_state("ˇfn main() {}");
    full_request.next().await;
    cx.run_until_parked();

    // Verify initial highlights exist (with no custom color yet)
    let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
    assert_eq!(
        initial_ranges,
        vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
        "Should have initial semantic token highlights"
    );
    let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
    assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
    // Initial color should be None or theme default (not red or blue)
    let initial_color = initial_styles[0].color;

    // Set a custom foreground color for function tokens via settings.json
    let red_color = Rgba {
        r: 1.0,
        g: 0.0,
        b: 0.0,
        a: 1.0,
    };
    cx.update(|_, cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                    semantic_token_rules: Some(SemanticTokenRules {
                        rules: Vec::from([SemanticTokenRule {
                            token_type: Some("function".to_string()),
                            foreground_color: Some(red_color),
                            ..SemanticTokenRule::default()
                        }]),
                    }),
                    ..GlobalLspSettingsContent::default()
                });
            });
        });
    });

    // Trigger a refetch by making an edit (which forces semantic tokens update)
    cx.set_state("ˇfn main() { }");
    full_request.next().await;
    cx.run_until_parked();

    // Verify the highlights now have the custom red color
    let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
    assert_eq!(
        styles_after_settings_change.len(),
        1,
        "Should still have one highlight"
    );
    assert_eq!(
        styles_after_settings_change[0].color,
        Some(Hsla::from(red_color)),
        "Highlight should have the custom red color from settings.json"
    );
    assert_ne!(
        styles_after_settings_change[0].color, initial_color,
        "Color should have changed from initial"
    );
}
1378
// Verifies that `experimental_theme_overrides` restyles existing semantic
// token highlights without an edit: setting an override recolors the token,
// changing it recolors again, and clearing it reverts to the initial color.
#[gpui::test]
async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
    use collections::IndexMap;
    use gpui::{Hsla, Rgba, UpdateGlobal as _};
    use theme::{HighlightStyleContent, ThemeStyleContent};

    init_test(cx, |_| {});

    // Enable full semantic-token support for Rust buffers.
    update_test_language_settings(cx, &|language_settings| {
        language_settings.languages.0.insert(
            "Rust".into(),
            LanguageSettingsContent {
                semantic_tokens: Some(SemanticTokens::Full),
                ..LanguageSettingsContent::default()
            },
        );
    });

    // Fake server advertises a legend with a single "function" token type.
    let mut cx = EditorLspTestContext::new_rust(
        lsp::ServerCapabilities {
            semantic_tokens_provider: Some(
                lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                    lsp::SemanticTokensOptions {
                        legend: lsp::SemanticTokensLegend {
                            token_types: Vec::from(["function".into()]),
                            token_modifiers: Vec::new(),
                        },
                        full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                        ..lsp::SemanticTokensOptions::default()
                    },
                ),
            ),
            ..lsp::ServerCapabilities::default()
        },
        cx,
    )
    .await;

    // One token: the `main` identifier (line 0, col 3, length 4).
    let mut full_request = cx
        .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
            move |_, _, _| async move {
                Ok(Some(lsp::SemanticTokensResult::Tokens(
                    lsp::SemanticTokens {
                        data: vec![
                            0, // delta_line
                            3, // delta_start
                            4, // length
                            0, // token_type (function)
                            0, // token_modifiers_bitset
                        ],
                        result_id: None,
                    },
                )))
            },
        );

    // Trigger the initial fetch and capture the baseline color.
    cx.set_state("ˇfn main() {}");
    full_request.next().await;
    cx.run_until_parked();

    let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
    assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
    let initial_color = initial_styles[0].color;

    // Changing experimental_theme_overrides triggers GlobalTheme reload,
    // which fires theme_changed → refresh_semantic_token_highlights.
    let red_color: Hsla = Rgba {
        r: 1.0,
        g: 0.0,
        b: 0.0,
        a: 1.0,
    }
    .into();
    cx.update(|_, cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                    syntax: IndexMap::from_iter([(
                        "function".to_string(),
                        HighlightStyleContent {
                            color: Some("#ff0000".to_string()),
                            background_color: None,
                            font_style: None,
                            font_weight: None,
                        },
                    )]),
                    ..ThemeStyleContent::default()
                });
            });
        });
    });

    // Advance past the refresh debounce before checking styles.
    cx.executor().advance_clock(Duration::from_millis(200));
    cx.run_until_parked();

    let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
    assert_eq!(styles_after_override.len(), 1);
    assert_eq!(
        styles_after_override[0].color,
        Some(red_color),
        "Highlight should have red color from theme override"
    );
    assert_ne!(
        styles_after_override[0].color, initial_color,
        "Color should have changed from initial"
    );

    // Changing the override to a different color also restyles.
    let blue_color: Hsla = Rgba {
        r: 0.0,
        g: 0.0,
        b: 1.0,
        a: 1.0,
    }
    .into();
    cx.update(|_, cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                    syntax: IndexMap::from_iter([(
                        "function".to_string(),
                        HighlightStyleContent {
                            color: Some("#0000ff".to_string()),
                            background_color: None,
                            font_style: None,
                            font_weight: None,
                        },
                    )]),
                    ..ThemeStyleContent::default()
                });
            });
        });
    });

    cx.executor().advance_clock(Duration::from_millis(200));
    cx.run_until_parked();

    let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
    assert_eq!(styles_after_second_override.len(), 1);
    assert_eq!(
        styles_after_second_override[0].color,
        Some(blue_color),
        "Highlight should have blue color from updated theme override"
    );

    // Removing overrides reverts to the original theme color.
    cx.update(|_, cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.theme.experimental_theme_overrides = None;
            });
        });
    });

    cx.executor().advance_clock(Duration::from_millis(200));
    cx.run_until_parked();

    let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
    assert_eq!(styles_after_clear.len(), 1);
    assert_eq!(
        styles_after_clear[0].color, initial_color,
        "Highlight should revert to initial color after clearing overrides"
    );
}
1543
// Verifies that per-theme overrides (`theme_overrides` keyed by the active
// theme's name) restyle existing semantic token highlights, same as the
// experimental global overrides exercised in the test above this one.
#[gpui::test]
async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
    use collections::IndexMap;
    use gpui::{Hsla, Rgba, UpdateGlobal as _};
    use theme::{HighlightStyleContent, ThemeStyleContent};
    use ui::ActiveTheme as _;

    init_test(cx, |_| {});

    // Enable full semantic-token support for Rust buffers.
    update_test_language_settings(cx, &|language_settings| {
        language_settings.languages.0.insert(
            "Rust".into(),
            LanguageSettingsContent {
                semantic_tokens: Some(SemanticTokens::Full),
                ..LanguageSettingsContent::default()
            },
        );
    });

    // Fake server advertises a legend with a single "function" token type.
    let mut cx = EditorLspTestContext::new_rust(
        lsp::ServerCapabilities {
            semantic_tokens_provider: Some(
                lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                    lsp::SemanticTokensOptions {
                        legend: lsp::SemanticTokensLegend {
                            token_types: Vec::from(["function".into()]),
                            token_modifiers: Vec::new(),
                        },
                        full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                        ..lsp::SemanticTokensOptions::default()
                    },
                ),
            ),
            ..lsp::ServerCapabilities::default()
        },
        cx,
    )
    .await;

    // One token: the `main` identifier (line 0, col 3, length 4).
    let mut full_request = cx
        .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
            move |_, _, _| async move {
                Ok(Some(lsp::SemanticTokensResult::Tokens(
                    lsp::SemanticTokens {
                        data: vec![
                            0, // delta_line
                            3, // delta_start
                            4, // length
                            0, // token_type (function)
                            0, // token_modifiers_bitset
                        ],
                        result_id: None,
                    },
                )))
            },
        );

    // Trigger the initial fetch and capture the baseline color.
    cx.set_state("ˇfn main() {}");
    full_request.next().await;
    cx.run_until_parked();

    let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
    assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
    let initial_color = initial_styles[0].color;

    // Per-theme overrides (theme_overrides keyed by theme name) also go through
    // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
    let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
    let green_color: Hsla = Rgba {
        r: 0.0,
        g: 1.0,
        b: 0.0,
        a: 1.0,
    }
    .into();
    cx.update(|_, cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.theme.theme_overrides = collections::HashMap::from_iter([(
                    theme_name.clone(),
                    ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#00ff00".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    },
                )]);
            });
        });
    });

    // Advance past the refresh debounce before checking styles.
    cx.executor().advance_clock(Duration::from_millis(200));
    cx.run_until_parked();

    let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
    assert_eq!(styles_after_override.len(), 1);
    assert_eq!(
        styles_after_override[0].color,
        Some(green_color),
        "Highlight should have green color from per-theme override"
    );
    assert_ne!(
        styles_after_override[0].color, initial_color,
        "Color should have changed from initial"
    );
}
1656
// Verifies that stopping the language servers for a buffer removes that
// buffer's semantic token highlights from the editor.
#[gpui::test]
async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
    init_test(cx, |_| {});

    // Enable full semantic-token support for Rust buffers.
    update_test_language_settings(cx, &|language_settings| {
        language_settings.languages.0.insert(
            "Rust".into(),
            LanguageSettingsContent {
                semantic_tokens: Some(SemanticTokens::Full),
                ..LanguageSettingsContent::default()
            },
        );
    });

    // Fake server advertises a legend with a single "function" token type.
    let mut cx = EditorLspTestContext::new_rust(
        lsp::ServerCapabilities {
            semantic_tokens_provider: Some(
                lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                    lsp::SemanticTokensOptions {
                        legend: lsp::SemanticTokensLegend {
                            token_types: vec!["function".into()],
                            token_modifiers: Vec::new(),
                        },
                        full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                        ..lsp::SemanticTokensOptions::default()
                    },
                ),
            ),
            ..lsp::ServerCapabilities::default()
        },
        cx,
    )
    .await;

    // One token: the `main` identifier (line 0, col 3, length 4).
    let mut full_request = cx
        .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
            move |_, _, _| async move {
                Ok(Some(lsp::SemanticTokensResult::Tokens(
                    lsp::SemanticTokens {
                        data: vec![
                            0, // delta_line
                            3, // delta_start
                            4, // length
                            0, // token_type
                            0, // token_modifiers_bitset
                        ],
                        result_id: None,
                    },
                )))
            },
        );

    cx.set_state("ˇfn main() {}");
    assert!(full_request.next().await.is_some());
    cx.run_until_parked();

    assert_eq!(
        extract_semantic_highlights(&cx.editor, &cx),
        vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
        "Semantic tokens should be present before stopping the server"
    );

    // Stop all language servers attached to the editor's buffers.
    cx.update_editor(|editor, _, cx| {
        let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
        editor.project.as_ref().unwrap().update(cx, |project, cx| {
            project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
        })
    });
    // Advance past the refresh debounce so the cleared state is applied.
    cx.executor().advance_clock(Duration::from_millis(200));
    cx.run_until_parked();

    assert_eq!(
        extract_semantic_highlights(&cx.editor, &cx),
        Vec::new(),
        "Semantic tokens should be cleared after stopping the server"
    );
}
1734
// Verifies that flipping the per-language `semantic_tokens` setting from
// `Full` to `Off` clears existing semantic token highlights.
#[gpui::test]
async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
    init_test(cx, |_| {});

    // Enable full semantic-token support for Rust buffers.
    update_test_language_settings(cx, &|language_settings| {
        language_settings.languages.0.insert(
            "Rust".into(),
            LanguageSettingsContent {
                semantic_tokens: Some(SemanticTokens::Full),
                ..LanguageSettingsContent::default()
            },
        );
    });

    // Fake server advertises a legend with a single "function" token type.
    let mut cx = EditorLspTestContext::new_rust(
        lsp::ServerCapabilities {
            semantic_tokens_provider: Some(
                lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                    lsp::SemanticTokensOptions {
                        legend: lsp::SemanticTokensLegend {
                            token_types: vec!["function".into()],
                            token_modifiers: Vec::new(),
                        },
                        full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                        ..lsp::SemanticTokensOptions::default()
                    },
                ),
            ),
            ..lsp::ServerCapabilities::default()
        },
        cx,
    )
    .await;

    // One token: the `main` identifier (line 0, col 3, length 4).
    let mut full_request = cx
        .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
            move |_, _, _| async move {
                Ok(Some(lsp::SemanticTokensResult::Tokens(
                    lsp::SemanticTokens {
                        data: vec![
                            0, // delta_line
                            3, // delta_start
                            4, // length
                            0, // token_type
                            0, // token_modifiers_bitset
                        ],
                        result_id: None,
                    },
                )))
            },
        );

    cx.set_state("ˇfn main() {}");
    assert!(full_request.next().await.is_some());
    cx.run_until_parked();

    assert_eq!(
        extract_semantic_highlights(&cx.editor, &cx),
        vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
        "Semantic tokens should be present before disabling the setting"
    );

    // Turn the feature off for Rust and let the settings observer react.
    update_test_language_settings(&mut cx, &|language_settings| {
        language_settings.languages.0.insert(
            "Rust".into(),
            LanguageSettingsContent {
                semantic_tokens: Some(SemanticTokens::Off),
                ..LanguageSettingsContent::default()
            },
        );
    });
    // Advance past the refresh debounce so the cleared state is applied.
    cx.executor().advance_clock(Duration::from_millis(200));
    cx.run_until_parked();

    assert_eq!(
        extract_semantic_highlights(&cx.editor, &cx),
        Vec::new(),
        "Semantic tokens should be cleared after disabling the setting"
    );
}
1815
1816 fn extract_semantic_highlight_styles(
1817 editor: &Entity<Editor>,
1818 cx: &TestAppContext,
1819 ) -> Vec<HighlightStyle> {
1820 editor.read_with(cx, |editor, cx| {
1821 editor
1822 .display_map
1823 .read(cx)
1824 .semantic_token_highlights
1825 .iter()
1826 .flat_map(|(_, (v, interner))| {
1827 v.iter().map(|highlights| interner[highlights.style])
1828 })
1829 .collect()
1830 })
1831 }
1832}