1use std::{collections::hash_map, sync::Arc, time::Duration};
2
3use collections::{HashMap, HashSet};
4use futures::future::join_all;
5use gpui::{
6 App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
7};
8use itertools::Itertools;
9use language::language_settings::language_settings;
10use project::{
11 lsp_store::{
12 BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
13 TokenType,
14 },
15 project_settings::ProjectSettings,
16};
17use settings::{
18 SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
19 SemanticTokenRules, Settings as _,
20};
21use text::BufferId;
22use theme::SyntaxTheme;
23use ui::ActiveTheme as _;
24
25use crate::{
26 Editor,
27 actions::ToggleSemanticHighlights,
28 display_map::{HighlightStyleInterner, SemanticTokenHighlight},
29};
30
/// Per-editor state backing LSP semantic-token highlighting.
pub(super) struct SemanticTokenState {
    // Styling rules taken from settings; compared on settings changes via `update_rules`.
    rules: SemanticTokenRules,
    // Whether semantic highlighting is currently on for this editor.
    enabled: bool,
    // The debounced in-flight refresh; replaced wholesale on every refresh or disable.
    update_task: Task<()>,
    // Buffer version at the time tokens were last fetched, used to skip
    // redundant server requests for unchanged buffers.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
37
38impl SemanticTokenState {
39 pub(super) fn new(cx: &App, enabled: bool) -> Self {
40 Self {
41 rules: ProjectSettings::get_global(cx)
42 .global_lsp_settings
43 .semantic_token_rules
44 .clone(),
45 enabled,
46 update_task: Task::ready(()),
47 fetched_for_buffers: HashMap::default(),
48 }
49 }
50
51 pub(super) fn enabled(&self) -> bool {
52 self.enabled
53 }
54
55 pub(super) fn toggle_enabled(&mut self) {
56 self.enabled = !self.enabled;
57 }
58
59 #[cfg(test)]
60 pub(super) fn take_update_task(&mut self) -> Task<()> {
61 std::mem::replace(&mut self.update_task, Task::ready(()))
62 }
63
64 pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
65 self.fetched_for_buffers.remove(buffer_id);
66 }
67
68 pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
69 if new_rules != self.rules {
70 self.rules = new_rules;
71 true
72 } else {
73 false
74 }
75 }
76}
77
impl Editor {
    /// Whether any buffer in this editor's multi-buffer has a semantics
    /// provider that supports semantic tokens.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(&mut |buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlighting is currently enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: flips semantic highlighting, drops all cached token
    /// versions, and kicks off a refresh (which also clears the highlights
    /// when the feature was just turned off).
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.invalidate_semantic_tokens(None);
        self.refresh_semantic_tokens(None, None, cx);
    }

    /// Forgets the fetched-version bookkeeping for one buffer (or for all
    /// buffers when `None`), so the next refresh re-queries the server(s).
    pub(super) fn invalidate_semantic_tokens(&mut self, for_buffer: Option<BufferId>) {
        match for_buffer {
            Some(for_buffer) => self.semantic_token_state.invalidate_buffer(&for_buffer),
            None => self.semantic_token_state.fetched_for_buffers.clear(),
        }
    }

    /// Re-fetches semantic tokens for the visible excerpts (plus `buffer_id`,
    /// if given) and installs the resulting highlights into the display map.
    ///
    /// The LSP traffic happens in a debounced background task stored in
    /// `self.semantic_token_state.update_task`; calling this again before the
    /// debounce elapses replaces (and thereby cancels) the previous refresh.
    /// `for_server` marks this as a server-initiated refresh, which first
    /// invalidates everything fetched so far.
    pub(super) fn refresh_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        // Feature off (globally or per-editor): drop cached state and
        // highlights, cancel any in-flight refresh, and bail.
        if !self.lsp_data_enabled() || !self.semantic_token_state.enabled() {
            self.invalidate_semantic_tokens(None);
            self.display_map.update(cx, |display_map, _| {
                // Clear in place when this is the only Arc reference;
                // otherwise swap in a fresh empty map so concurrent readers
                // keep their snapshot.
                match Arc::get_mut(&mut display_map.semantic_token_highlights) {
                    Some(highlights) => highlights.clear(),
                    None => display_map.semantic_token_highlights = Arc::new(Default::default()),
                };
            });
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-initiated refresh invalidates everything fetched so far;
        // the stale buffers' highlights are removed after the fetch below.
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Candidate buffers: all visible excerpts plus the explicitly
        // requested one, limited to registered buffers whose language
        // settings enable semantic tokens.
        let buffers_to_query = self
            .visible_excerpts(true, cx)
            .into_values()
            .map(|(buffer, ..)| buffer)
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && language_settings(
                        editor_buffer.read(cx).language().map(|l| l.name()),
                        editor_buffer.read(cx).file(),
                        cx,
                    )
                    .semantic_tokens
                    .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Drop highlights for buffers that still have them, are not being
        // re-queried, and whose settings no longer enable semantic tokens.
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .keys()
            .copied()
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            // Debounce: coalesce bursts of edits into one round of requests.
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            // Skip buffers unchanged since the last fetch.
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                let task = sema.semantic_tokens(buffer, for_server, cx);
                                Some(async move { (buffer_id, query_version, task.await) })
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // With fresh results in hand, drop highlights that a
                    // server-initiated refresh marked stale before the fetch.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                            editor.semantic_token_state.invalidate_buffer(&buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // An explicit "no tokens" response clears this
                            // buffer's highlights.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                     {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Record the fetched version, but never move backwards:
                        // a stale in-flight response must not clobber newer data.
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                // Highlights from all servers for this buffer
                                // are merged into one sorted list, sharing one
                                // style interner.
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    // NOTE(review): reserves 2x the token count,
                                    // though each token yields at most one
                                    // highlight — presumably a deliberate upper
                                    // bound; confirm.
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        cx,
                                    ));
                                }

                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                Arc::make_mut(&mut display_map.semantic_token_highlights).insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }
}
328
/// Converts per-buffer semantic tokens into multi-buffer-anchored highlights,
/// skipping tokens whose endpoints are not visible or that resolve to no style.
///
/// All `2 * tokens` anchors (start and end per token) are resolved in one
/// batch, paired back up with `tuples`, and re-aligned with their tokens via
/// `zip`. This relies on `text_anchors_to_visible_anchors` yielding exactly
/// one (optional) output per input anchor, in input order — TODO(review):
/// confirm that contract; a mismatch would silently misalign styles.
fn buffer_into_editor_highlights<'a, 'b>(
    buffer_tokens: &'a [BufferSemanticToken],
    stylizer: &'a SemanticTokenStylizer,
    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
    interner: &'b mut HighlightStyleInterner,
    cx: &'a App,
) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
    multi_buffer_snapshot
        .text_anchors_to_visible_anchors(
            buffer_tokens
                .iter()
                .flat_map(|token| [token.range.start, token.range.end]),
        )
        .into_iter()
        .tuples::<(_, _)>()
        .zip(buffer_tokens)
        .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
            // Both endpoints must be visible for the token to be rendered.
            let range = multi_buffer_start?..multi_buffer_end?;
            let style = convert_token(
                stylizer,
                cx.theme().syntax(),
                token.token_type,
                token.token_modifiers,
            )?;
            // Interning dedupes identical styles across tokens.
            let style = interner.intern(style);
            Some(SemanticTokenHighlight {
                range,
                style,
                token_type: token.token_type,
                token_modifiers: token.token_modifiers,
                server_id: stylizer.server_id(),
            })
        })
}
363
/// Resolves the final [`HighlightStyle`] for a token of `token_type` with the
/// given modifier bitset, or `None` when no rule matches (letting regular
/// syntax highlighting show through).
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    let rules = stylizer.rules_for_token(token_type)?;
    // A rule applies only when the token carries *all* of its modifiers.
    let matching = rules.iter().filter(|rule| {
        rule.token_modifiers
            .iter()
            .all(|m| stylizer.has_modifier(modifiers, m))
    });

    let mut highlight = HighlightStyle::default();
    let mut empty = true;

    // Each matching rule may overwrite fields accumulated from earlier rules,
    // so later rules take precedence.
    for rule in matching {
        empty = false;

        // The first theme style named by the rule that the theme defines.
        let style = rule.style.iter().find_map(|style| theme.get_opt(style));

        // Sets one highlight field with precedence:
        //   1. the rule's explicit value (transformed),
        //   2. the matched theme style's value for that field,
        //   3. whatever was accumulated so far.
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        // NOTE: `InheritForeground(true)` reads `highlight.color` as computed
        // so far, so the foreground overwrite above must stay ordered before
        // the underline/strikethrough overwrites.
        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }

    // No rule matched at all: report "no semantic style" rather than a
    // default (empty) style.
    if empty { None } else { Some(highlight) }
}
457
458#[cfg(test)]
459mod tests {
460 use std::{
461 ops::Range,
462 sync::atomic::{self, AtomicUsize},
463 };
464
465 use futures::StreamExt as _;
466 use gpui::{AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext};
467 use language::{Language, LanguageConfig, LanguageMatcher};
468 use languages::FakeLspAdapter;
469 use multi_buffer::{
470 AnchorRangeExt, ExcerptRange, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset,
471 };
472 use project::Project;
473 use rope::Point;
474 use serde_json::json;
475 use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore};
476 use workspace::{MultiWorkspace, WorkspaceHandle as _};
477
478 use crate::{
479 Capability,
480 editor_tests::{init_test, update_test_language_settings},
481 test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
482 };
483
484 use super::*;
485
    /// A server that only advertises full (non-delta) semantic tokens must
    /// receive a fresh `semanticTokens/full` request after every edit — even
    /// though it returns a `result_id`, the client must not request a delta.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // `delta: None` — the server cannot serve deltas.
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One "function" token at columns 3..7 (`main`).
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Two edits, two full requests (and no delta requests were handled).
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
563
    /// A delta-capable server that replies with `result_id: None` forces the
    /// client back to plain full requests — no delta request may follow.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // The server claims delta support...
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Wait for the debounced refresh task to finish applying highlights.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        // Both requests were full requests.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
639
    /// A delta-capable server that returns a `result_id` gets one initial
    /// `semanticTokens/full` request; the next edit goes through
    /// `semanticTokens/full/delta` referencing that id.
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    // The delta request must reference the previously returned id.
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                // No edits: the token set is unchanged.
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Exactly one full request (initial) and one delta (after the edit).
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
735
    /// Two language servers attached to the same TOML buffer both contribute
    /// semantic tokens, and the editor merges the highlights from both.
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for TOML.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test.
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight the value at column 4 as a number
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        // Let the refresh debounce elapse before taking the update task.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // One highlight from each server, merged and sorted by position.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
955
956 #[gpui::test]
957 async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
958 init_test(cx, |_| {});
959
960 update_test_language_settings(cx, &|language_settings| {
961 language_settings.languages.0.insert(
962 "TOML".into(),
963 LanguageSettingsContent {
964 semantic_tokens: Some(SemanticTokens::Full),
965 ..LanguageSettingsContent::default()
966 },
967 );
968 language_settings.languages.0.insert(
969 "Rust".into(),
970 LanguageSettingsContent {
971 semantic_tokens: Some(SemanticTokens::Full),
972 ..LanguageSettingsContent::default()
973 },
974 );
975 });
976
977 let toml_language = Arc::new(Language::new(
978 LanguageConfig {
979 name: "TOML".into(),
980 matcher: LanguageMatcher {
981 path_suffixes: vec!["toml".into()],
982 ..LanguageMatcher::default()
983 },
984 ..LanguageConfig::default()
985 },
986 None,
987 ));
988 let rust_language = Arc::new(Language::new(
989 LanguageConfig {
990 name: "Rust".into(),
991 matcher: LanguageMatcher {
992 path_suffixes: vec!["rs".into()],
993 ..LanguageMatcher::default()
994 },
995 ..LanguageConfig::default()
996 },
997 None,
998 ));
999
1000 let toml_legend = lsp::SemanticTokensLegend {
1001 token_types: vec!["property".into()],
1002 token_modifiers: Vec::new(),
1003 };
1004 let rust_legend = lsp::SemanticTokensLegend {
1005 token_types: vec!["constant".into()],
1006 token_modifiers: Vec::new(),
1007 };
1008
1009 let app_state = cx.update(workspace::AppState::test);
1010
1011 cx.update(|cx| {
1012 assets::Assets.load_test_fonts(cx);
1013 crate::init(cx);
1014 workspace::init(app_state.clone(), cx);
1015 });
1016
1017 let project = Project::test(app_state.fs.clone(), [], cx).await;
1018 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1019 let full_counter_toml = Arc::new(AtomicUsize::new(0));
1020 let full_counter_toml_clone = full_counter_toml.clone();
1021
1022 let mut toml_server = language_registry.register_fake_lsp(
1023 toml_language.name(),
1024 FakeLspAdapter {
1025 name: "toml",
1026 capabilities: lsp::ServerCapabilities {
1027 semantic_tokens_provider: Some(
1028 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1029 lsp::SemanticTokensOptions {
1030 legend: toml_legend,
1031 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1032 ..lsp::SemanticTokensOptions::default()
1033 },
1034 ),
1035 ),
1036 ..lsp::ServerCapabilities::default()
1037 },
1038 initializer: Some(Box::new({
1039 let full_counter_toml_clone = full_counter_toml_clone.clone();
1040 move |fake_server| {
1041 let full_counter = full_counter_toml_clone.clone();
1042 fake_server
1043 .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1044 move |_, _| {
1045 full_counter.fetch_add(1, atomic::Ordering::Release);
1046 async move {
1047 Ok(Some(lsp::SemanticTokensResult::Tokens(
1048 lsp::SemanticTokens {
1049 // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
1050 data: vec![
1051 0, // delta_line (line 0)
1052 0, // delta_start
1053 1, // length
1054 0, // token_type
1055 0, // token_modifiers_bitset
1056 1, // delta_line (line 1)
1057 0, // delta_start
1058 1, // length
1059 0, // token_type
1060 0, // token_modifiers_bitset
1061 1, // delta_line (line 2)
1062 0, // delta_start
1063 1, // length
1064 0, // token_type
1065 0, // token_modifiers_bitset
1066 ],
1067 result_id: Some("a".into()),
1068 },
1069 )))
1070 }
1071 },
1072 );
1073 }
1074 })),
1075 ..FakeLspAdapter::default()
1076 },
1077 );
1078 language_registry.add(toml_language.clone());
1079 let mut rust_server = language_registry.register_fake_lsp(
1080 rust_language.name(),
1081 FakeLspAdapter {
1082 name: "rust",
1083 capabilities: lsp::ServerCapabilities {
1084 semantic_tokens_provider: Some(
1085 lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1086 lsp::SemanticTokensOptions {
1087 legend: rust_legend,
1088 full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1089 ..lsp::SemanticTokensOptions::default()
1090 },
1091 ),
1092 ),
1093 ..lsp::ServerCapabilities::default()
1094 },
1095 ..FakeLspAdapter::default()
1096 },
1097 );
1098 language_registry.add(rust_language.clone());
1099
1100 app_state
1101 .fs
1102 .as_fake()
1103 .insert_tree(
1104 EditorLspTestContext::root_path(),
1105 json!({
1106 ".git": {},
1107 "dir": {
1108 "foo.toml": "a = 1\nb = 2\nc = 3\n",
1109 "bar.rs": "const c: usize = 3;\n",
1110 }
1111 }),
1112 )
1113 .await;
1114
1115 let (multi_workspace, cx) =
1116 cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
1117 let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
1118 project
1119 .update(cx, |project, cx| {
1120 project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
1121 })
1122 .await
1123 .unwrap();
1124 cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
1125 .await;
1126
1127 let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
1128 let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
1129 let (toml_item, rust_item) = workspace.update_in(cx, |workspace, window, cx| {
1130 (
1131 workspace.open_path(toml_file, None, true, window, cx),
1132 workspace.open_path(rust_file, None, true, window, cx),
1133 )
1134 });
1135 let toml_item = toml_item.await.expect("Could not open test file");
1136 let rust_item = rust_item.await.expect("Could not open test file");
1137
1138 let (toml_editor, rust_editor) = cx.update(|_, cx| {
1139 (
1140 toml_item
1141 .act_as::<Editor>(cx)
1142 .expect("Opened test file wasn't an editor"),
1143 rust_item
1144 .act_as::<Editor>(cx)
1145 .expect("Opened test file wasn't an editor"),
1146 )
1147 });
1148 let toml_buffer = cx.read(|cx| {
1149 toml_editor
1150 .read(cx)
1151 .buffer()
1152 .read(cx)
1153 .as_singleton()
1154 .unwrap()
1155 });
1156 let rust_buffer = cx.read(|cx| {
1157 rust_editor
1158 .read(cx)
1159 .buffer()
1160 .read(cx)
1161 .as_singleton()
1162 .unwrap()
1163 });
1164 let multibuffer = cx.new(|cx| {
1165 let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
1166 multibuffer.push_excerpts(
1167 toml_buffer.clone(),
1168 [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
1169 cx,
1170 );
1171 multibuffer.push_excerpts(
1172 rust_buffer.clone(),
1173 [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
1174 cx,
1175 );
1176 multibuffer
1177 });
1178
1179 let editor = workspace.update_in(cx, |workspace, window, cx| {
1180 let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
1181 workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
1182 editor
1183 });
1184 editor.update_in(cx, |editor, window, cx| {
1185 let nav_history = workspace
1186 .read(cx)
1187 .active_pane()
1188 .read(cx)
1189 .nav_history_for_item(&cx.entity());
1190 editor.set_nav_history(Some(nav_history));
1191 window.focus(&editor.focus_handle(cx), cx)
1192 });
1193
1194 let _toml_server = toml_server.next().await.unwrap();
1195 let _rust_server = rust_server.next().await.unwrap();
1196
1197 // Initial request.
1198 cx.executor().advance_clock(Duration::from_millis(200));
1199 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1200 cx.run_until_parked();
1201 task.await;
1202 assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
1203 cx.run_until_parked();
1204
1205 // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
1206 // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
1207 assert_eq!(
1208 extract_semantic_highlights(&editor, &cx),
1209 vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
1210 );
1211
1212 // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
1213 let toml_excerpt_id =
1214 editor.read_with(cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]);
1215 editor.update_in(cx, |editor, _, cx| {
1216 editor.buffer().update(cx, |buffer, cx| {
1217 buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx);
1218 });
1219 });
1220
1221 // Wait for semantic tokens to be re-fetched after expansion.
1222 cx.executor().advance_clock(Duration::from_millis(200));
1223 let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1224 cx.run_until_parked();
1225 task.await;
1226
1227 // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
1228 // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
1229 // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
1230 assert_eq!(
1231 extract_semantic_highlights(&editor, &cx),
1232 vec![
1233 MultiBufferOffset(0)..MultiBufferOffset(1),
1234 MultiBufferOffset(6)..MultiBufferOffset(7),
1235 MultiBufferOffset(12)..MultiBufferOffset(13),
1236 ]
1237 );
1238 }
1239
    // Verifies that semantic-token requests are deduplicated/debounced when a
    // multibuffer contains multiple excerpts of the SAME underlying buffer:
    // editing both excerpts should add only one extra SemanticTokensFull request.
    #[gpui::test]
    async fn lsp_semantic_tokens_multibuffer_shared(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for TOML buffers so the editor fetches them.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // Single token type — the fake server only ever emits token type index 0.
        let toml_legend = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        // Counts how many SemanticTokensFull requests the fake server receives.
        let full_counter_toml = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_clone = full_counter_toml.clone();

        let mut toml_server = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_clone = full_counter_toml_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    // Record every full-token request so the test can
                                    // assert on the total number made.
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let toml_editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });
        let toml_buffer = cx.read(|cx| {
            toml_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        // Two excerpts of the SAME buffer, each covering the whole file; edits in
        // either excerpt dirty the same underlying buffer.
        let multibuffer = cx.new(|cx| {
            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
            multibuffer.push_excerpts(
                toml_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
                cx,
            );
            multibuffer.push_excerpts(
                toml_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
                cx,
            );
            multibuffer
        });

        let editor = workspace.update_in(cx, |_, window, cx| {
            cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx))
        });
        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        let _toml_server = toml_server.next().await.unwrap();

        // Initial request.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);

        // Edit two parts of the multibuffer, which both map to the same buffer.
        //
        // Without debouncing, this grabs semantic tokens 4 times (twice for the
        // toml editor, and twice for the multibuffer).
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
            editor.edit([(MultiBufferOffset(12)..MultiBufferOffset(13), "c")], cx);
        });
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
        );

        // Only one additional request should have been issued for the shared buffer.
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 2);
    }
1435
1436 fn extract_semantic_highlights(
1437 editor: &Entity<Editor>,
1438 cx: &TestAppContext,
1439 ) -> Vec<Range<MultiBufferOffset>> {
1440 editor.read_with(cx, |editor, cx| {
1441 let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1442 editor
1443 .display_map
1444 .read(cx)
1445 .semantic_token_highlights
1446 .iter()
1447 .flat_map(|(_, (v, _))| v.iter())
1448 .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1449 .collect()
1450 })
1451 }
1452
    // Verifies that changing `semantic_token_rules` in the global LSP settings
    // restyles semantic-token highlights on the next token fetch.
    #[gpui::test]
    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // The fake server always returns one 4-char "function" token at line 0,
        // column 3 — i.e. "main" in "fn main() {}".
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type (function)
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None,
                            },
                        )))
                    }
                },
            );

        // Trigger initial semantic tokens fetch
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Verify initial highlights exist (with no custom color yet)
        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
        assert_eq!(
            initial_ranges,
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Should have initial semantic token highlights"
        );
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Initial color should be None or theme default (not red or blue)
        let initial_color = initial_styles[0].color;

        // Set a custom foreground color for function tokens via settings.json
        let red_color = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        };
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: Vec::from([SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                foreground_color: Some(red_color),
                                ..SemanticTokenRule::default()
                            }]),
                        }),
                        ..GlobalLspSettingsContent::default()
                    });
                });
            });
        });

        // Trigger a refetch by making an edit (which forces semantic tokens update)
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        // Verify the highlights now have the custom red color
        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(
            styles_after_settings_change.len(),
            1,
            "Should still have one highlight"
        );
        assert_eq!(
            styles_after_settings_change[0].color,
            Some(Hsla::from(red_color)),
            "Highlight should have the custom red color from settings.json"
        );
        assert_ne!(
            styles_after_settings_change[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1573
    // Verifies that `experimental_theme_overrides` changes restyle existing
    // semantic-token highlights (set, change, then clear an override) without
    // requiring a new LSP request.
    #[gpui::test]
    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // The fake server always returns one 4-char "function" token at line 0,
        // column 3 — i.e. "main" in "fn main() {}".
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Changing experimental_theme_overrides triggers GlobalTheme reload,
        // which fires theme_changed → refresh_semantic_token_highlights.
        let red_color: Hsla = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#ff0000".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(red_color),
            "Highlight should have red color from theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );

        // Changing the override to a different color also restyles.
        let blue_color: Hsla = Rgba {
            r: 0.0,
            g: 0.0,
            b: 1.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#0000ff".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_second_override.len(), 1);
        assert_eq!(
            styles_after_second_override[0].color,
            Some(blue_color),
            "Highlight should have blue color from updated theme override"
        );

        // Removing overrides reverts to the original theme color.
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = None;
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_clear.len(), 1);
        assert_eq!(
            styles_after_clear[0].color, initial_color,
            "Highlight should revert to initial color after clearing overrides"
        );
    }
1738
    // Verifies that per-theme overrides (`theme_overrides` keyed by the active
    // theme's name) also restyle semantic-token highlights via a theme reload.
    #[gpui::test]
    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};
        use ui::ActiveTheme as _;

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // The fake server always returns one 4-char "function" token at line 0,
        // column 3 — i.e. "main" in "fn main() {}".
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Per-theme overrides (theme_overrides keyed by theme name) also go through
        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
        let green_color: Hsla = Rgba {
            r: 0.0,
            g: 1.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
                        theme_name.clone(),
                        ThemeStyleContent {
                            syntax: IndexMap::from_iter([(
                                "function".to_string(),
                                HighlightStyleContent {
                                    color: Some("#00ff00".to_string()),
                                    background_color: None,
                                    font_style: None,
                                    font_weight: None,
                                },
                            )]),
                            ..ThemeStyleContent::default()
                        },
                    )]);
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(green_color),
            "Highlight should have green color from per-theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1851
    // Verifies that stopping a buffer's language servers clears its
    // semantic-token highlights.
    #[gpui::test]
    async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // The fake server always returns one 4-char "function" token at line 0,
        // column 3 — i.e. "main" in "fn main() {}".
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before stopping the server"
        );

        // Stop every language server attached to the editor's buffers.
        cx.update_editor(|editor, _, cx| {
            let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
            editor.project.as_ref().unwrap().update(cx, |project, cx| {
                project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
            })
        });
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after stopping the server"
        );
    }
1929
    // Verifies that flipping the `semantic_tokens` language setting to Off
    // clears highlights that were previously fetched.
    #[gpui::test]
    async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // The fake server always returns one 4-char "function" token at line 0,
        // column 3 — i.e. "main" in "fn main() {}".
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before disabling the setting"
        );

        // Turn the setting Off; the settings change should clear highlights.
        update_test_language_settings(&mut cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Off),
                    ..LanguageSettingsContent::default()
                },
            );
        });
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after disabling the setting"
        );
    }
2010
2011 fn extract_semantic_highlight_styles(
2012 editor: &Entity<Editor>,
2013 cx: &TestAppContext,
2014 ) -> Vec<HighlightStyle> {
2015 editor.read_with(cx, |editor, cx| {
2016 editor
2017 .display_map
2018 .read(cx)
2019 .semantic_token_highlights
2020 .iter()
2021 .flat_map(|(_, (v, interner))| {
2022 v.iter().map(|highlights| interner[highlights.style])
2023 })
2024 .collect()
2025 })
2026 }
2027}