1use std::{collections::hash_map, sync::Arc, time::Duration};
2
3use collections::{HashMap, HashSet};
4use futures::future::join_all;
5use gpui::{
6 App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
7};
8use itertools::Itertools;
9use language::language_settings::language_settings;
10use project::{
11 lsp_store::{
12 BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
13 TokenType,
14 },
15 project_settings::ProjectSettings,
16};
17use settings::{
18 SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
19 SemanticTokenRules, Settings as _,
20};
21use text::BufferId;
22use theme::SyntaxTheme;
23use ui::ActiveTheme as _;
24
25use crate::{
26 Editor,
27 actions::ToggleSemanticHighlights,
28 display_map::{HighlightStyleInterner, SemanticTokenHighlight},
29};
30
/// Per-editor state for LSP semantic-token highlighting.
pub(super) struct SemanticTokenState {
    // Styling rules snapshot from project settings; compared on settings
    // changes to decide whether highlights need recomputation.
    rules: SemanticTokenRules,
    // Whether semantic highlighting is currently turned on for this editor.
    enabled: bool,
    // The debounced background refresh task; replaced wholesale on every
    // refresh, which cancels any in-flight fetch.
    update_task: Task<()>,
    // The buffer version at which tokens were last fetched, per buffer.
    // Lets a refresh skip buffers that haven't changed since the last fetch.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
37
38impl SemanticTokenState {
39 pub(super) fn new(cx: &App, enabled: bool) -> Self {
40 Self {
41 rules: ProjectSettings::get_global(cx)
42 .global_lsp_settings
43 .semantic_token_rules
44 .clone(),
45 enabled,
46 update_task: Task::ready(()),
47 fetched_for_buffers: HashMap::default(),
48 }
49 }
50
51 pub(super) fn enabled(&self) -> bool {
52 self.enabled
53 }
54
55 pub(super) fn toggle_enabled(&mut self) {
56 self.enabled = !self.enabled;
57 }
58
59 #[cfg(test)]
60 pub(super) fn take_update_task(&mut self) -> Task<()> {
61 std::mem::replace(&mut self.update_task, Task::ready(()))
62 }
63
64 pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
65 self.fetched_for_buffers.remove(buffer_id);
66 }
67
68 pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
69 if new_rules != self.rules {
70 self.rules = new_rules;
71 true
72 } else {
73 false
74 }
75 }
76}
77
impl Editor {
    /// Returns `true` if any buffer in this editor has a semantics provider
    /// capable of serving semantic tokens.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        // OR together support across all buffers of the (possibly multi-)buffer.
        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(&mut |buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlighting is currently enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: toggles semantic highlighting, drops all cached fetch
    /// versions, and kicks off a refresh (which clears highlights if the
    /// toggle turned the feature off).
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.invalidate_semantic_tokens(None);
        self.refresh_semantic_tokens(None, None, cx);
    }

    /// Forgets fetched-version bookkeeping for one buffer, or for all buffers
    /// when `for_buffer` is `None`, so the next refresh re-queries them.
    pub(super) fn invalidate_semantic_tokens(&mut self, for_buffer: Option<BufferId>) {
        match for_buffer {
            Some(for_buffer) => self.semantic_token_state.invalidate_buffer(&for_buffer),
            None => self.semantic_token_state.fetched_for_buffers.clear(),
        }
    }

    /// Schedules a (debounced) re-fetch of semantic tokens for the visible
    /// buffers, plus `buffer_id` if given. When `for_server` is set, all
    /// previously fetched versions are dropped so that server's tokens are
    /// re-requested, and existing highlights for those buffers are
    /// invalidated once the new results arrive.
    pub(super) fn refresh_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        // Feature off (or not a full editor): clear everything and bail.
        if !self.mode().is_full() || !self.semantic_token_state.enabled() {
            self.invalidate_semantic_tokens(None);
            self.display_map.update(cx, |display_map, _| {
                // Clear in place when we hold the only Arc; otherwise swap in
                // a fresh empty map so other holders keep their snapshot.
                match Arc::get_mut(&mut display_map.semantic_token_highlights) {
                    Some(highlights) => highlights.clear(),
                    None => display_map.semantic_token_highlights = Arc::new(Default::default()),
                };
            });
            // Replacing the task cancels any in-flight refresh.
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-initiated refresh invalidates all cached fetch versions;
        // remember which buffers were affected so their highlights can be
        // dropped after the new fetch completes.
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Candidate buffers: everything visible, plus the explicitly
        // requested buffer, filtered to registered buffers whose language
        // settings enable semantic tokens.
        let buffers_to_query = self
            .visible_excerpts(true, cx)
            .into_values()
            .map(|(buffer, ..)| buffer)
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && language_settings(
                        editor_buffer.read(cx).language().map(|l| l.name()),
                        editor_buffer.read(cx).file(),
                        cx,
                    )
                    .semantic_tokens
                    .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Buffers that currently have highlights but are no longer queried
        // AND have semantic tokens disabled in settings: drop their
        // highlights eagerly.
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .keys()
            .copied()
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            // Debounce: coalesce rapid edits into one fetch.
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            // Skip buffers already fetched at this version.
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                let task = sema.semantic_tokens(buffer, for_server, cx);
                                Some(async move { (buffer_id, query_version, task.await) })
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // Now that fresh results are in hand, drop highlights for
                    // buffers invalidated by a server-initiated refresh.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                            editor.semantic_token_state.invalidate_buffer(&buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // A `None` payload means "no tokens": clear any
                            // stale highlights for this buffer.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                    {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Record the fetched version; bail if a newer fetch
                        // already landed for this buffer.
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        // Convert each server's token list into highlight
                        // ranges, merge across servers, and store them sorted
                        // by start position.
                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    // NOTE(review): at most one highlight is
                                    // produced per token, so reserving
                                    // 2 * len over-allocates — presumably a
                                    // leftover from pushing start/end
                                    // separately; confirm and shrink.
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        cx,
                                    ));
                                }

                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                Arc::make_mut(&mut display_map.semantic_token_highlights).insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }
}
328
/// Maps buffer-local semantic tokens to multi-buffer highlight entries.
///
/// All token start/end anchors are resolved to multi-buffer anchors in one
/// batched call, then re-paired with their tokens. Tokens whose range falls
/// outside the visible excerpts (either anchor resolves to `None`) or whose
/// type/modifiers yield no style under the stylizer's rules are dropped.
/// Styles are interned so identical styles share a single entry.
fn buffer_into_editor_highlights<'a, 'b>(
    buffer_tokens: &'a [BufferSemanticToken],
    stylizer: &'a SemanticTokenStylizer,
    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
    interner: &'b mut HighlightStyleInterner,
    cx: &'a App,
) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
    multi_buffer_snapshot
        .text_anchors_to_visible_anchors(
            // Flatten to [start0, end0, start1, end1, ...] so one batched
            // resolution covers every anchor.
            buffer_tokens
                .iter()
                .flat_map(|token| [token.range.start, token.range.end]),
        )
        .into_iter()
        // Re-group the flat anchor stream back into (start, end) pairs,
        // which line up 1:1 with the original tokens.
        .tuples::<(_, _)>()
        .zip(buffer_tokens)
        .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
            let range = multi_buffer_start?..multi_buffer_end?;
            let style = convert_token(
                stylizer,
                cx.theme().syntax(),
                token.token_type,
                token.token_modifiers,
            )?;
            let style = interner.intern(style);
            Some(SemanticTokenHighlight {
                range,
                style,
                token_type: token.token_type,
                token_modifiers: token.token_modifiers,
                server_id: stylizer.server_id(),
            })
        })
}
363
/// Resolves the highlight style for a token type + modifier bitset by folding
/// every matching rule, in order, on top of a default `HighlightStyle`.
///
/// A rule matches when all of its required modifiers are present in
/// `modifiers`. For each style field, precedence per rule is: the rule's
/// explicit value, else the first theme style the rule names that defines the
/// field, else whatever an earlier rule already set. Returns `None` when the
/// token type has no rules or none match.
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    let rules = stylizer.rules_for_token(token_type)?;
    let matching = rules.iter().filter(|rule| {
        rule.token_modifiers
            .iter()
            .all(|m| stylizer.has_modifier(modifiers, m))
    });

    let mut highlight = HighlightStyle::default();
    // Tracks whether any rule matched at all; an all-default style from zero
    // matches must yield `None`, not `Some(default)`.
    let mut empty = true;

    for rule in matching {
        empty = false;

        // First theme entry named by the rule that actually resolves.
        let style = rule.style.iter().find_map(|style| theme.get_opt(style));

        // Layer one field: rule value > resolved theme value > prior value.
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        // Color is applied first so the underline/strikethrough closures
        // below can inherit this rule's foreground via `highlight.color`.
        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }

    if empty { None } else { Some(highlight) }
}
457
458#[cfg(test)]
459mod tests {
460 use std::{
461 ops::Range,
462 sync::atomic::{self, AtomicUsize},
463 };
464
465 use futures::StreamExt as _;
466 use gpui::{AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext};
467 use language::{Language, LanguageConfig, LanguageMatcher};
468 use languages::FakeLspAdapter;
469 use multi_buffer::{
470 AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
471 };
472 use project::Project;
473 use rope::Point;
474 use serde_json::json;
475 use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore};
476 use workspace::{MultiWorkspace, WorkspaceHandle as _};
477
478 use crate::{
479 Capability,
480 editor_tests::{init_test, update_test_language_settings},
481 test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
482 };
483
484 use super::*;
485
    // A server that advertises full support WITHOUT deltas must receive a
    // fresh full request after every edit — even when it returns a result ID.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // `delta: None` — deltas are NOT supported.
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token at columns 3..7.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // A second edit must trigger a second FULL request (no delta).
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
563
    // A server that DOES support deltas but returns `result_id: None` gives
    // the client nothing to base a delta on, so every edit must fall back to
    // a full request.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // Deltas ARE advertised here — the `None` result
                            // ID below is what disables them in practice.
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token at columns 3..7.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Await the debounced refresh so bookkeeping settles before editing.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Second edit: still a FULL request, since no result ID was stored.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
639
    // Happy-path delta flow: first fetch is full (returning result ID "a"),
    // the next fetch after an edit is a delta request referencing "a".
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // Deltas are fully supported by this server.
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token at columns 3..7.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    // The delta must reference the previous full result's ID.
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                // Empty edit list: tokens are unchanged.
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Second edit triggers a DELTA, not another full request.
        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
735
    // Two language servers for the same (TOML) buffer: each is queried once
    // after an edit and their token highlights are merged in the editor.
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for TOML buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test.
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        // Per-server counters to assert each server is hit exactly once.
        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight '3' as a literal
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait for both fake servers to start.
        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        // Skip past the refresh debounce, then await the fetch task.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // Both servers' tokens should be present, merged and sorted.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
955
956 #[gpui::test]
957 async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
958 init_test(cx, |_| {});
959
960 update_test_language_settings(cx, &|language_settings| {
961 language_settings.languages.0.insert(
962 "TOML".into(),
963 LanguageSettingsContent {
964 semantic_tokens: Some(SemanticTokens::Full),
965 ..LanguageSettingsContent::default()
966 },
967 );
968 language_settings.languages.0.insert(
969 "Rust".into(),
970 LanguageSettingsContent {
971 semantic_tokens: Some(SemanticTokens::Full),
972 ..LanguageSettingsContent::default()
973 },
974 );
975 });
976
977 let toml_language = Arc::new(Language::new(
978 LanguageConfig {
979 name: "TOML".into(),
980 matcher: LanguageMatcher {
981 path_suffixes: vec!["toml".into()],
982 ..LanguageMatcher::default()
983 },
984 ..LanguageConfig::default()
985 },
986 None,
987 ));
988 let rust_language = Arc::new(Language::new(
989 LanguageConfig {
990 name: "Rust".into(),
991 matcher: LanguageMatcher {
992 path_suffixes: vec!["rs".into()],
993 ..LanguageMatcher::default()
994 },
995 ..LanguageConfig::default()
996 },
997 None,
998 ));
999
1000 let toml_legend = lsp::SemanticTokensLegend {
1001 token_types: vec!["property".into()],
1002 token_modifiers: Vec::new(),
1003 };
1004 let rust_legend = lsp::SemanticTokensLegend {
1005 token_types: vec!["constant".into()],
1006 token_modifiers: Vec::new(),
1007 };
1008
1009 let app_state = cx.update(workspace::AppState::test);
1010
1011 cx.update(|cx| {
1012 assets::Assets.load_test_fonts(cx);
1013 crate::init(cx);
1014 workspace::init(app_state.clone(), cx);
1015 });
1016
1017 let project = Project::test(app_state.fs.clone(), [], cx).await;
1018 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1019 let full_counter_toml = Arc::new(AtomicUsize::new(0));
1020 let full_counter_toml_clone = full_counter_toml.clone();
1021
1022 let mut toml_server = language_registry.register_fake_lsp(
1023 toml_language.name(),
1024 FakeLspAdapter {
1025 name: "toml",
1026 capabilities: lsp::ServerCapabilities {
1027 semantic_tokens_provider: Some(
1028 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1029 lsp::SemanticTokensOptions {
1030 legend: toml_legend,
1031 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1032 ..lsp::SemanticTokensOptions::default()
1033 },
1034 ),
1035 ),
1036 ..lsp::ServerCapabilities::default()
1037 },
1038 initializer: Some(Box::new({
1039 let full_counter_toml_clone = full_counter_toml_clone.clone();
1040 move |fake_server| {
1041 let full_counter = full_counter_toml_clone.clone();
1042 fake_server
1043 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1044 move |_, _| {
1045 full_counter.fetch_add(1, atomic::Ordering::Release);
1046 async move {
1047 Ok(Some(lsp::SemanticTokensResult::Tokens(
1048 lsp::SemanticTokens {
1049 // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
1050 data: vec![
1051 0, // delta_line (line 0)
1052 0, // delta_start
1053 1, // length
1054 0, // token_type
1055 0, // token_modifiers_bitset
1056 1, // delta_line (line 1)
1057 0, // delta_start
1058 1, // length
1059 0, // token_type
1060 0, // token_modifiers_bitset
1061 1, // delta_line (line 2)
1062 0, // delta_start
1063 1, // length
1064 0, // token_type
1065 0, // token_modifiers_bitset
1066 ],
1067 result_id: Some("a".into()),
1068 },
1069 )))
1070 }
1071 },
1072 );
1073 }
1074 })),
1075 ..FakeLspAdapter::default()
1076 },
1077 );
1078 language_registry.add(toml_language.clone());
1079 let mut rust_server = language_registry.register_fake_lsp(
1080 rust_language.name(),
1081 FakeLspAdapter {
1082 name: "rust",
1083 capabilities: lsp::ServerCapabilities {
1084 semantic_tokens_provider: Some(
1085 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1086 lsp::SemanticTokensOptions {
1087 legend: rust_legend,
1088 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1089 ..lsp::SemanticTokensOptions::default()
1090 },
1091 ),
1092 ),
1093 ..lsp::ServerCapabilities::default()
1094 },
1095 ..FakeLspAdapter::default()
1096 },
1097 );
1098 language_registry.add(rust_language.clone());
1099
1100 app_state
1101 .fs
1102 .as_fake()
1103 .insert_tree(
1104 EditorLspTestContext::root_path(),
1105 json!({
1106 ".git": {},
1107 "dir": {
1108 "foo.toml": "a = 1\nb = 2\nc = 3\n",
1109 "bar.rs": "const c: usize = 3;\n",
1110 }
1111 }),
1112 )
1113 .await;
1114
1115 let (multi_workspace, cx) =
1116 cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
1117 let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
1118 project
1119 .update(cx, |project, cx| {
1120 project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
1121 })
1122 .await
1123 .unwrap();
1124 cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
1125 .await;
1126
1127 let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
1128 let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
1129 let (toml_item, rust_item) = workspace.update_in(cx, |workspace, window, cx| {
1130 (
1131 workspace.open_path(toml_file, None, true, window, cx),
1132 workspace.open_path(rust_file, None, true, window, cx),
1133 )
1134 });
1135 let toml_item = toml_item.await.expect("Could not open test file");
1136 let rust_item = rust_item.await.expect("Could not open test file");
1137
1138 let (toml_editor, rust_editor) = cx.update(|_, cx| {
1139 (
1140 toml_item
1141 .act_as::<Editor>(cx)
1142 .expect("Opened test file wasn't an editor"),
1143 rust_item
1144 .act_as::<Editor>(cx)
1145 .expect("Opened test file wasn't an editor"),
1146 )
1147 });
1148 let toml_buffer = cx.read(|cx| {
1149 toml_editor
1150 .read(cx)
1151 .buffer()
1152 .read(cx)
1153 .as_singleton()
1154 .unwrap()
1155 });
1156 let rust_buffer = cx.read(|cx| {
1157 rust_editor
1158 .read(cx)
1159 .buffer()
1160 .read(cx)
1161 .as_singleton()
1162 .unwrap()
1163 });
1164 let multibuffer = cx.new(|cx| {
1165 let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
1166 multibuffer.set_excerpts_for_path(
1167 PathKey::sorted(0),
1168 toml_buffer.clone(),
1169 [Point::new(0, 0)..Point::new(0, 4)],
1170 0,
1171 cx,
1172 );
1173 multibuffer.set_excerpts_for_path(
1174 PathKey::sorted(1),
1175 rust_buffer.clone(),
1176 [Point::new(0, 0)..Point::new(0, 4)],
1177 0,
1178 cx,
1179 );
1180 multibuffer
1181 });
1182
1183 let editor = workspace.update_in(cx, |workspace, window, cx| {
1184 let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
1185 workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
1186 editor
1187 });
1188 editor.update_in(cx, |editor, window, cx| {
1189 let nav_history = workspace
1190 .read(cx)
1191 .active_pane()
1192 .read(cx)
1193 .nav_history_for_item(&cx.entity());
1194 editor.set_nav_history(Some(nav_history));
1195 window.focus(&editor.focus_handle(cx), cx)
1196 });
1197
1198 let _toml_server = toml_server.next().await.unwrap();
1199 let _rust_server = rust_server.next().await.unwrap();
1200
1201 // Initial request.
1202 cx.executor().advance_clock(Duration::from_millis(200));
1203 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1204 cx.run_until_parked();
1205 task.await;
1206 assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
1207 cx.run_until_parked();
1208
1209 // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
1210 // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
1211 assert_eq!(
1212 extract_semantic_highlights(&editor, &cx),
1213 vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
1214 );
1215
1216 // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
1217 let toml_excerpt_id =
1218 editor.read_with(cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]);
1219 editor.update_in(cx, |editor, _, cx| {
1220 editor.buffer().update(cx, |buffer, cx| {
1221 buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx);
1222 });
1223 });
1224
1225 // Wait for semantic tokens to be re-fetched after expansion.
1226 cx.executor().advance_clock(Duration::from_millis(200));
1227 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1228 cx.run_until_parked();
1229 task.await;
1230
1231 // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
1232 // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
1233 // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
1234 assert_eq!(
1235 extract_semantic_highlights(&editor, &cx),
1236 vec![
1237 MultiBufferOffset(0)..MultiBufferOffset(1),
1238 MultiBufferOffset(6)..MultiBufferOffset(7),
1239 MultiBufferOffset(12)..MultiBufferOffset(13),
1240 ]
1241 );
1242 }
1243
1244 fn extract_semantic_highlights(
1245 editor: &Entity<Editor>,
1246 cx: &TestAppContext,
1247 ) -> Vec<Range<MultiBufferOffset>> {
1248 editor.read_with(cx, |editor, cx| {
1249 let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1250 editor
1251 .display_map
1252 .read(cx)
1253 .semantic_token_highlights
1254 .iter()
1255 .flat_map(|(_, (v, _))| v.iter())
1256 .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1257 .collect()
1258 })
1259 }
1260
    /// Changing `semantic_token_rules` in the user settings must restyle
    /// already-fetched semantic tokens: after a refetch, the highlight for a
    /// "function" token picks up the custom foreground color from settings.
    #[gpui::test]
    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust so the editor requests them.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertising a single "function" token type with
        // delta-capable full semantic token requests.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Handler always reports one 4-char "function" token at line 0, col 3
        // (i.e. "main" in "fn main() {}").
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type (function)
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None,
                            },
                        )))
                    }
                },
            );

        // Trigger initial semantic tokens fetch
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Verify initial highlights exist (with no custom color yet)
        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
        assert_eq!(
            initial_ranges,
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Should have initial semantic token highlights"
        );
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Initial color should be None or theme default (not red or blue)
        let initial_color = initial_styles[0].color;

        // Set a custom foreground color for function tokens via settings.json
        let red_color = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        };
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: Vec::from([SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                foreground_color: Some(red_color),
                                ..SemanticTokenRule::default()
                            }]),
                        }),
                        ..GlobalLspSettingsContent::default()
                    });
                });
            });
        });

        // Trigger a refetch by making an edit (which forces semantic tokens update)
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        // Verify the highlights now have the custom red color
        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(
            styles_after_settings_change.len(),
            1,
            "Should still have one highlight"
        );
        assert_eq!(
            styles_after_settings_change[0].color,
            Some(Hsla::from(red_color)),
            "Highlight should have the custom red color from settings.json"
        );
        assert_ne!(
            styles_after_settings_change[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1381
    /// Setting, changing, and clearing `experimental_theme_overrides` must each
    /// restyle existing semantic token highlights without requiring a refetch:
    /// red override → blue override → cleared back to the initial theme color.
    #[gpui::test]
    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust so the editor requests them.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertising a single "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Handler always reports one 4-char "function" token at line 0, col 3.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and capture the unstyled baseline color.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Changing experimental_theme_overrides triggers GlobalTheme reload,
        // which fires theme_changed → refresh_semantic_token_highlights.
        let red_color: Hsla = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#ff0000".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        // Let the debounced refresh run.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(red_color),
            "Highlight should have red color from theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );

        // Changing the override to a different color also restyles.
        let blue_color: Hsla = Rgba {
            r: 0.0,
            g: 0.0,
            b: 1.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#0000ff".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_second_override.len(), 1);
        assert_eq!(
            styles_after_second_override[0].color,
            Some(blue_color),
            "Highlight should have blue color from updated theme override"
        );

        // Removing overrides reverts to the original theme color.
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = None;
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_clear.len(), 1);
        assert_eq!(
            styles_after_clear[0].color, initial_color,
            "Highlight should revert to initial color after clearing overrides"
        );
    }
1546
    /// Per-theme overrides (`theme_overrides` keyed by the active theme's name)
    /// must restyle existing semantic token highlights, same as the global
    /// `experimental_theme_overrides` path.
    #[gpui::test]
    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};
        use ui::ActiveTheme as _;

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust so the editor requests them.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertising a single "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Handler always reports one 4-char "function" token at line 0, col 3.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and capture the unstyled baseline color.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Per-theme overrides (theme_overrides keyed by theme name) also go through
        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
        let green_color: Hsla = Rgba {
            r: 0.0,
            g: 1.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
                        theme_name.clone(),
                        ThemeStyleContent {
                            syntax: IndexMap::from_iter([(
                                "function".to_string(),
                                HighlightStyleContent {
                                    color: Some("#00ff00".to_string()),
                                    background_color: None,
                                    font_style: None,
                                    font_weight: None,
                                },
                            )]),
                            ..ThemeStyleContent::default()
                        },
                    )]);
                });
            });
        });

        // Let the debounced refresh run.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(green_color),
            "Highlight should have green color from per-theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1659
1660 #[gpui::test]
1661 async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
1662 init_test(cx, |_| {});
1663
1664 update_test_language_settings(cx, &|language_settings| {
1665 language_settings.languages.0.insert(
1666 "Rust".into(),
1667 LanguageSettingsContent {
1668 semantic_tokens: Some(SemanticTokens::Full),
1669 ..LanguageSettingsContent::default()
1670 },
1671 );
1672 });
1673
1674 let mut cx = EditorLspTestContext::new_rust(
1675 lsp::ServerCapabilities {
1676 semantic_tokens_provider: Some(
1677 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1678 lsp::SemanticTokensOptions {
1679 legend: lsp::SemanticTokensLegend {
1680 token_types: vec!["function".into()],
1681 token_modifiers: Vec::new(),
1682 },
1683 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1684 ..lsp::SemanticTokensOptions::default()
1685 },
1686 ),
1687 ),
1688 ..lsp::ServerCapabilities::default()
1689 },
1690 cx,
1691 )
1692 .await;
1693
1694 let mut full_request = cx
1695 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1696 move |_, _, _| async move {
1697 Ok(Some(lsp::SemanticTokensResult::Tokens(
1698 lsp::SemanticTokens {
1699 data: vec![
1700 0, // delta_line
1701 3, // delta_start
1702 4, // length
1703 0, // token_type
1704 0, // token_modifiers_bitset
1705 ],
1706 result_id: None,
1707 },
1708 )))
1709 },
1710 );
1711
1712 cx.set_state("ˇfn main() {}");
1713 assert!(full_request.next().await.is_some());
1714 cx.run_until_parked();
1715
1716 assert_eq!(
1717 extract_semantic_highlights(&cx.editor, &cx),
1718 vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
1719 "Semantic tokens should be present before stopping the server"
1720 );
1721
1722 cx.update_editor(|editor, _, cx| {
1723 let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
1724 editor.project.as_ref().unwrap().update(cx, |project, cx| {
1725 project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
1726 })
1727 });
1728 cx.executor().advance_clock(Duration::from_millis(200));
1729 cx.run_until_parked();
1730
1731 assert_eq!(
1732 extract_semantic_highlights(&cx.editor, &cx),
1733 Vec::new(),
1734 "Semantic tokens should be cleared after stopping the server"
1735 );
1736 }
1737
1738 #[gpui::test]
1739 async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
1740 init_test(cx, |_| {});
1741
1742 update_test_language_settings(cx, &|language_settings| {
1743 language_settings.languages.0.insert(
1744 "Rust".into(),
1745 LanguageSettingsContent {
1746 semantic_tokens: Some(SemanticTokens::Full),
1747 ..LanguageSettingsContent::default()
1748 },
1749 );
1750 });
1751
1752 let mut cx = EditorLspTestContext::new_rust(
1753 lsp::ServerCapabilities {
1754 semantic_tokens_provider: Some(
1755 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1756 lsp::SemanticTokensOptions {
1757 legend: lsp::SemanticTokensLegend {
1758 token_types: vec!["function".into()],
1759 token_modifiers: Vec::new(),
1760 },
1761 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1762 ..lsp::SemanticTokensOptions::default()
1763 },
1764 ),
1765 ),
1766 ..lsp::ServerCapabilities::default()
1767 },
1768 cx,
1769 )
1770 .await;
1771
1772 let mut full_request = cx
1773 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1774 move |_, _, _| async move {
1775 Ok(Some(lsp::SemanticTokensResult::Tokens(
1776 lsp::SemanticTokens {
1777 data: vec![
1778 0, // delta_line
1779 3, // delta_start
1780 4, // length
1781 0, // token_type
1782 0, // token_modifiers_bitset
1783 ],
1784 result_id: None,
1785 },
1786 )))
1787 },
1788 );
1789
1790 cx.set_state("ˇfn main() {}");
1791 assert!(full_request.next().await.is_some());
1792 cx.run_until_parked();
1793
1794 assert_eq!(
1795 extract_semantic_highlights(&cx.editor, &cx),
1796 vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
1797 "Semantic tokens should be present before disabling the setting"
1798 );
1799
1800 update_test_language_settings(&mut cx, &|language_settings| {
1801 language_settings.languages.0.insert(
1802 "Rust".into(),
1803 LanguageSettingsContent {
1804 semantic_tokens: Some(SemanticTokens::Off),
1805 ..LanguageSettingsContent::default()
1806 },
1807 );
1808 });
1809 cx.executor().advance_clock(Duration::from_millis(200));
1810 cx.run_until_parked();
1811
1812 assert_eq!(
1813 extract_semantic_highlights(&cx.editor, &cx),
1814 Vec::new(),
1815 "Semantic tokens should be cleared after disabling the setting"
1816 );
1817 }
1818
1819 fn extract_semantic_highlight_styles(
1820 editor: &Entity<Editor>,
1821 cx: &TestAppContext,
1822 ) -> Vec<HighlightStyle> {
1823 editor.read_with(cx, |editor, cx| {
1824 editor
1825 .display_map
1826 .read(cx)
1827 .semantic_token_highlights
1828 .iter()
1829 .flat_map(|(_, (v, interner))| {
1830 v.iter().map(|highlights| interner[highlights.style])
1831 })
1832 .collect()
1833 })
1834 }
1835}