semantic_tokens.rs

   1use std::{collections::hash_map, sync::Arc, time::Duration};
   2
   3use collections::{HashMap, HashSet};
   4use futures::future::join_all;
   5use gpui::{
   6    App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
   7};
   8use itertools::Itertools;
   9use language::language_settings::LanguageSettings;
  10use project::{
  11    lsp_store::{
  12        BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
  13        TokenType,
  14    },
  15    project_settings::ProjectSettings,
  16};
  17use settings::{
  18    SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
  19    SemanticTokenRules, Settings as _,
  20};
  21use text::BufferId;
  22use theme::SyntaxTheme;
  23use ui::ActiveTheme as _;
  24
  25use crate::{
  26    Editor,
  27    actions::ToggleSemanticHighlights,
  28    display_map::{HighlightStyleInterner, SemanticTokenHighlight},
  29};
  30
/// Per-editor state for LSP semantic-token highlighting.
pub(super) struct SemanticTokenState {
    // Snapshot of the styling rules from settings; compared in `update_rules`
    // to detect when a settings change should trigger a refresh.
    rules: SemanticTokenRules,
    // Whether semantic highlighting is currently active for this editor.
    enabled: bool,
    // The in-flight (debounced) token-fetch task; replaced wholesale on each
    // refresh, which cancels any previously spawned fetch.
    update_task: Task<()>,
    // The buffer version at which each buffer's tokens were last fetched,
    // used to skip re-querying buffers that haven't changed since.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
  37
  38impl SemanticTokenState {
  39    pub(super) fn new(cx: &App, enabled: bool) -> Self {
  40        Self {
  41            rules: ProjectSettings::get_global(cx)
  42                .global_lsp_settings
  43                .semantic_token_rules
  44                .clone(),
  45            enabled,
  46            update_task: Task::ready(()),
  47            fetched_for_buffers: HashMap::default(),
  48        }
  49    }
  50
  51    pub(super) fn enabled(&self) -> bool {
  52        self.enabled
  53    }
  54
  55    pub(super) fn toggle_enabled(&mut self) {
  56        self.enabled = !self.enabled;
  57    }
  58
  59    #[cfg(test)]
  60    pub(super) fn take_update_task(&mut self) -> Task<()> {
  61        std::mem::replace(&mut self.update_task, Task::ready(()))
  62    }
  63
  64    pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
  65        self.fetched_for_buffers.remove(buffer_id);
  66    }
  67
  68    pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
  69        if new_rules != self.rules {
  70            self.rules = new_rules;
  71            true
  72        } else {
  73            false
  74        }
  75    }
  76}
  77
impl Editor {
    /// Reports whether any buffer in this editor's multibuffer has semantic
    /// token support from the semantics provider.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        // True if at least one contained buffer supports semantic tokens.
        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(&mut |buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlights are currently enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: toggles semantic highlights on or off, drops all
    /// cached token state, and triggers a refresh (which either re-fetches
    /// tokens or clears the existing highlights, depending on the new state).
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.invalidate_semantic_tokens(None);
        self.refresh_semantic_tokens(None, None, cx);
    }

    /// Drops the fetched-version record for one buffer — or all buffers when
    /// `for_buffer` is `None` — so the next refresh re-queries the server.
    pub(super) fn invalidate_semantic_tokens(&mut self, for_buffer: Option<BufferId>) {
        match for_buffer {
            Some(for_buffer) => self.semantic_token_state.invalidate_buffer(&for_buffer),
            None => self.semantic_token_state.fetched_for_buffers.clear(),
        }
    }

    /// Schedules a (debounced) fetch of semantic tokens for the visible
    /// buffers, plus `buffer_id` if given.
    ///
    /// When `for_server` is set, all previously fetched versions are drained
    /// so every buffer is re-queried and its old highlights replaced once the
    /// new results arrive. Replacing `update_task` cancels any fetch that is
    /// still in flight.
    pub(super) fn refresh_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        // Disabled (either LSP data globally or semantic highlights for this
        // editor): tear everything down — clear cached versions, remove all
        // highlights, and cancel any in-flight fetch.
        if !self.lsp_data_enabled() || !self.semantic_token_state.enabled() {
            self.invalidate_semantic_tokens(None);
            self.display_map.update(cx, |display_map, _| {
                // Clear in place when we hold the only Arc; otherwise swap in
                // a fresh empty map so existing readers keep their snapshot.
                match Arc::get_mut(&mut display_map.semantic_token_highlights) {
                    Some(highlights) => highlights.clear(),
                    None => display_map.semantic_token_highlights = Arc::new(Default::default()),
                };
            });
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-initiated refresh invalidates everything we fetched so
        // far; remember which buffers had data so their highlights can be
        // dropped when the new results come back.
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Candidate buffers: visible, LSP-relevant ones (plus the explicitly
        // requested one), filtered to those that are registered and have
        // semantic tokens enabled in their language settings.
        let buffers_to_query = self
            .visible_buffers(cx)
            .into_iter()
            .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && LanguageSettings::for_buffer(editor_buffer.read(cx), cx)
                        .semantic_tokens
                        .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Buffers that currently have highlights but won't be queried and
        // have tokens disabled in settings: drop their stale state now.
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .keys()
            .copied()
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        LanguageSettings::for_buffer(&buffer, cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        // Replacing the task cancels any previously scheduled fetch, which
        // together with the timer below debounces rapid refresh requests.
        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            // Skip buffers whose contents haven't changed
                            // since we last fetched their tokens.
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                sema.semantic_tokens(buffer, for_server, cx).map(
                                    |task| async move { (buffer_id, query_version, task.await) },
                                )
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            // Await all per-buffer fetches concurrently.
            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // Apply the deferred server-refresh invalidation now that
                    // replacement data (if any) has arrived.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                            editor.semantic_token_state.invalidate_buffer(&buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // An explicit "no tokens" response clears any
                            // previously applied highlights for the buffer.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                    {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Record the fetched version, but drop results that
                        // raced with a newer fetch for the same buffer.
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                // Merge highlights from every server that
                                // returned tokens for this buffer.
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        cx,
                                    ));
                                }

                                // Keep highlights ordered by start position
                                // for the display map's consumers.
                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                Arc::make_mut(&mut display_map.semantic_token_highlights).insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }
}
 325
/// Converts one server's semantic tokens for a buffer into editor-space
/// highlights.
///
/// Every token's start/end anchors are resolved to multibuffer anchors in one
/// batched call; since each token contributes exactly two anchors, `tuples`
/// re-pairs the resolved anchors back into ranges in the same order. Tokens
/// whose endpoints don't resolve (the `?` on the `Option` anchors — e.g. not
/// in any visible excerpt) are skipped, as are tokens for which the stylizer
/// yields no style. Styles are deduplicated through the interner.
fn buffer_into_editor_highlights<'a, 'b>(
    buffer_tokens: &'a [BufferSemanticToken],
    stylizer: &'a SemanticTokenStylizer,
    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
    interner: &'b mut HighlightStyleInterner,
    cx: &'a App,
) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
    multi_buffer_snapshot
        .text_anchors_to_visible_anchors(
            // Flatten each token into its (start, end) anchor pair so all
            // anchors can be resolved in a single pass.
            buffer_tokens
                .iter()
                .flat_map(|token| [token.range.start, token.range.end]),
        )
        .into_iter()
        // Re-pair consecutive resolved anchors: (start, end) per token.
        .tuples::<(_, _)>()
        .zip(buffer_tokens)
        .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
            let range = multi_buffer_start?..multi_buffer_end?;
            let style = convert_token(
                stylizer,
                cx.theme().syntax(),
                token.token_type,
                token.token_modifiers,
            )?;
            let style = interner.intern(style);
            Some(SemanticTokenHighlight {
                range,
                style,
                token_type: token.token_type,
                token_modifiers: token.token_modifiers,
                server_id: stylizer.server_id(),
            })
        })
}
 360
/// Computes the final `HighlightStyle` for a token by merging all matching
/// rules in order.
///
/// A rule matches when the token carries every modifier the rule requires.
/// Rules are applied field-by-field: within a rule, an explicit override
/// wins, then the theme style the rule references by name, then whatever an
/// earlier rule already accumulated. Returns `None` when no rule matched.
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    let rules = stylizer.rules_for_token(token_type)?;
    // Keep only rules whose required modifiers are all set on this token.
    let matching = rules.iter().filter(|rule| {
        rule.token_modifiers
            .iter()
            .all(|m| stylizer.has_modifier(modifiers, m))
    });

    let mut highlight = HighlightStyle::default();
    // Tracks whether any rule matched at all; a default-but-matched style is
    // still returned, while no match yields `None`.
    let mut empty = true;

    for rule in matching {
        empty = false;

        // First theme style resolvable from the rule's style names; used as
        // a fallback for fields the rule doesn't override explicitly.
        let style = rule
            .style
            .iter()
            .find_map(|style| theme.style_for_name(style));

        // Sets one highlight field using the precedence: explicit rule value
        // (via $transform) > named theme style's field > prior value.
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        // NOTE: `InheritForeground` reads `highlight.color` as accumulated so
        // far — i.e. the foreground resolved by this and earlier rules,
        // including this rule's own `foreground_color` applied just above.
        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }

    if empty { None } else { Some(highlight) }
}
 457
 458#[cfg(test)]
 459mod tests {
 460    use std::{
 461        ops::Range,
 462        sync::atomic::{self, AtomicUsize},
 463    };
 464
 465    use futures::StreamExt as _;
 466    use gpui::{AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext};
 467    use language::{Language, LanguageConfig, LanguageMatcher};
 468    use languages::FakeLspAdapter;
 469    use multi_buffer::{
 470        AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
 471    };
 472    use project::Project;
 473    use rope::Point;
 474    use serde_json::json;
 475    use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore};
 476    use workspace::{MultiWorkspace, WorkspaceHandle as _};
 477
 478    use crate::{
 479        Capability,
 480        editor_tests::{init_test, update_test_language_settings},
 481        test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
 482    };
 483
 484    use super::*;
 485
    /// The server advertises `full` support but no delta capability
    /// (`delta: None`), so after an edit the client must issue a second full
    /// request even though the first response carried a `result_id`.
    /// Verifies two full requests and the resulting highlight over "main".
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        // Each full request returns a single "function" token at columns 3..7.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // Editing the buffer must trigger another *full* request.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 563
    /// The server advertises delta support (`delta: Some(true)`) but returns
    /// `result_id: None`, which forbids subsequent delta requests — so an
    /// edit must still trigger a second full request.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        // Each full request returns one "function" token at columns 3..7.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Await the debounced update task so the response is fully applied.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Edit: without a stored result ID the client must go full again.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 639
    /// Happy-path delta flow: the server supports deltas and returns a
    /// `result_id`, so the request after an edit is a delta request carrying
    /// the previous result ID. Verifies one full + one delta request and that
    /// an empty-edits delta keeps the original highlight.
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        // Full request: one "function" token at columns 3..7, result ID "a".
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        // Delta request: must carry the previous result ID "a"; responds
        // with no edits and a new result ID "b".
        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // After an edit, a delta request (not a full one) is expected.
        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
 735
    /// Two language servers for the same language (TOML) both provide semantic
    /// tokens: the editor must request tokens from each server exactly once and
    /// merge the results into a single set of highlights.
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token requests for TOML buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test.
        // Each advertises a disjoint single-entry legend, so token_type index 0
        // means "property" for server 1 and "number" for server 2.
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        // Per-server counters for how many SemanticTokensFull requests were served.
        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight the first character on line 0
                                                // as a "property" (legend index 0)
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight the single character at column 4
                                                // of line 0 (the value '1') as a "number"
                                                // (legend index 0 of this server's legend)
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait until both fake servers have actually started.
        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        // The edit replaces 'a' with 'b' and schedules a token refetch.
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        // Advance the fake clock so any pending debounce timers fire, then await
        // the update task so highlights are applied before asserting.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // Both servers' tokens are merged: column 0 from toml1, column 4 from toml2.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        // Each server was queried exactly once — no duplicate or dropped requests.
        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
 955
    /// Semantic tokens in a multi-buffer with partial excerpts: only tokens
    /// inside the visible excerpt range are highlighted, and expanding an
    /// excerpt refetches tokens so newly revealed lines get highlights too.
    #[gpui::test]
    async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token requests for both languages in the multi-buffer.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));
        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        let toml_legend = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let rust_legend = lsp::SemanticTokensLegend {
            token_types: vec!["constant".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        // Counts SemanticTokensFull requests served by the TOML server.
        let full_counter_toml = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_clone = full_counter_toml.clone();

        let mut toml_server = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_clone = full_counter_toml_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
                                                data: vec![
                                                    0, // delta_line (line 0)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 1)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 2)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());
        // The Rust server advertises semantic-token support but installs no
        // request handler; only the TOML server's responses matter here.
        let mut rust_server = language_registry.register_fake_lsp(
            rust_language.name(),
            FakeLspAdapter {
                name: "rust",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: rust_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(rust_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\nc = 3\n",
                        "bar.rs": "const c: usize = 3;\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        // Project paths are sorted, so "bar.rs" is index 0 and "foo.toml" index 1.
        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
        let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let (toml_item, rust_item) = workspace.update_in(cx, |workspace, window, cx| {
            (
                workspace.open_path(toml_file, None, true, window, cx),
                workspace.open_path(rust_file, None, true, window, cx),
            )
        });
        let toml_item = toml_item.await.expect("Could not open test file");
        let rust_item = rust_item.await.expect("Could not open test file");

        let (toml_editor, rust_editor) = cx.update(|_, cx| {
            (
                toml_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
                rust_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
            )
        });
        let toml_buffer = cx.read(|cx| {
            toml_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        let rust_buffer = cx.read(|cx| {
            rust_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        // Build a multi-buffer with one single-line excerpt from each file.
        let multibuffer = cx.new(|cx| {
            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
            multibuffer.set_excerpts_for_path(
                PathKey::sorted(0),
                toml_buffer.clone(),
                [Point::new(0, 0)..Point::new(0, 4)],
                0,
                cx,
            );
            multibuffer.set_excerpts_for_path(
                PathKey::sorted(1),
                rust_buffer.clone(),
                [Point::new(0, 0)..Point::new(0, 4)],
                0,
                cx,
            );
            multibuffer
        });

        let editor = workspace.update_in(cx, |workspace, window, cx| {
            let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
            workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
            editor
        });
        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait until both fake servers have actually started.
        let _toml_server = toml_server.next().await.unwrap();
        let _rust_server = rust_server.next().await.unwrap();

        // Initial request.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
        cx.run_until_parked();

        // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
        // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
        );

        // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
        let toml_anchor = editor.read_with(cx, |editor, cx| {
            editor
                .buffer()
                .read(cx)
                .snapshot(cx)
                .anchor_in_excerpt(text::Anchor::min_for_buffer(
                    toml_buffer.read(cx).remote_id(),
                ))
                .unwrap()
        });
        editor.update_in(cx, |editor, _, cx| {
            editor.buffer().update(cx, |buffer, cx| {
                buffer.expand_excerpts([toml_anchor], 2, ExpandExcerptDirection::Down, cx);
            });
        });

        // Wait for semantic tokens to be re-fetched after expansion.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
        // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
        // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(6)..MultiBufferOffset(7),
                MultiBufferOffset(12)..MultiBufferOffset(13),
            ]
        );
    }
1251
1252    fn extract_semantic_highlights(
1253        editor: &Entity<Editor>,
1254        cx: &TestAppContext,
1255    ) -> Vec<Range<MultiBufferOffset>> {
1256        editor.read_with(cx, |editor, cx| {
1257            let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1258            editor
1259                .display_map
1260                .read(cx)
1261                .semantic_token_highlights
1262                .iter()
1263                .flat_map(|(_, (v, _))| v.iter())
1264                .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1265                .collect()
1266        })
1267    }
1268
    /// Changing `semantic_token_rules` in the global LSP settings restyles
    /// semantic-token highlights: after a settings change and a token refetch,
    /// "function" tokens pick up the user-configured foreground color.
    #[gpui::test]
    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});

        // Enable full semantic-token requests for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            // Single-entry legend: token_type index 0 == "function".
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Server always answers with one "function" token covering columns 3..7
        // of line 0 ("main" in "fn main() {}").
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type (function)
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None,
                            },
                        )))
                    }
                },
            );

        // Trigger initial semantic tokens fetch
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Verify initial highlights exist (with no custom color yet)
        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
        assert_eq!(
            initial_ranges,
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Should have initial semantic token highlights"
        );
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Initial color should be None or theme default (not red or blue)
        let initial_color = initial_styles[0].color;

        // Set a custom foreground color for function tokens via settings.json
        let red_color = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        };
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: Vec::from([SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                foreground_color: Some(red_color),
                                ..SemanticTokenRule::default()
                            }]),
                        }),
                        ..GlobalLspSettingsContent::default()
                    });
                });
            });
        });

        // Trigger a refetch by making an edit (which forces semantic tokens update)
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        // Verify the highlights now have the custom red color
        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(
            styles_after_settings_change.len(),
            1,
            "Should still have one highlight"
        );
        assert_eq!(
            styles_after_settings_change[0].color,
            Some(Hsla::from(red_color)),
            "Highlight should have the custom red color from settings.json"
        );
        assert_ne!(
            styles_after_settings_change[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1389
1390    #[gpui::test]
1391    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
1392        use collections::IndexMap;
1393        use gpui::{Hsla, Rgba, UpdateGlobal as _};
1394        use theme_settings::{HighlightStyleContent, ThemeStyleContent};
1395
1396        init_test(cx, |_| {});
1397
1398        update_test_language_settings(cx, &|language_settings| {
1399            language_settings.languages.0.insert(
1400                "Rust".into(),
1401                LanguageSettingsContent {
1402                    semantic_tokens: Some(SemanticTokens::Full),
1403                    ..LanguageSettingsContent::default()
1404                },
1405            );
1406        });
1407
1408        let mut cx = EditorLspTestContext::new_rust(
1409            lsp::ServerCapabilities {
1410                semantic_tokens_provider: Some(
1411                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1412                        lsp::SemanticTokensOptions {
1413                            legend: lsp::SemanticTokensLegend {
1414                                token_types: Vec::from(["function".into()]),
1415                                token_modifiers: Vec::new(),
1416                            },
1417                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1418                            ..lsp::SemanticTokensOptions::default()
1419                        },
1420                    ),
1421                ),
1422                ..lsp::ServerCapabilities::default()
1423            },
1424            cx,
1425        )
1426        .await;
1427
1428        let mut full_request = cx
1429            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1430                move |_, _, _| async move {
1431                    Ok(Some(lsp::SemanticTokensResult::Tokens(
1432                        lsp::SemanticTokens {
1433                            data: vec![
1434                                0, // delta_line
1435                                3, // delta_start
1436                                4, // length
1437                                0, // token_type (function)
1438                                0, // token_modifiers_bitset
1439                            ],
1440                            result_id: None,
1441                        },
1442                    )))
1443                },
1444            );
1445
1446        cx.set_state("ˇfn main() {}");
1447        full_request.next().await;
1448        cx.run_until_parked();
1449
1450        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
1451        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
1452        let initial_color = initial_styles[0].color;
1453
1454        // Changing experimental_theme_overrides triggers GlobalTheme reload,
1455        // which fires theme_changed → refresh_semantic_token_highlights.
1456        let red_color: Hsla = Rgba {
1457            r: 1.0,
1458            g: 0.0,
1459            b: 0.0,
1460            a: 1.0,
1461        }
1462        .into();
1463        cx.update(|_, cx| {
1464            SettingsStore::update_global(cx, |store, cx| {
1465                store.update_user_settings(cx, |settings| {
1466                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
1467                        syntax: IndexMap::from_iter([(
1468                            "function".to_string(),
1469                            HighlightStyleContent {
1470                                color: Some("#ff0000".to_string()),
1471                                background_color: None,
1472                                font_style: None,
1473                                font_weight: None,
1474                            },
1475                        )]),
1476                        ..ThemeStyleContent::default()
1477                    });
1478                });
1479            });
1480        });
1481
1482        cx.executor().advance_clock(Duration::from_millis(200));
1483        cx.run_until_parked();
1484
1485        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
1486        assert_eq!(styles_after_override.len(), 1);
1487        assert_eq!(
1488            styles_after_override[0].color,
1489            Some(red_color),
1490            "Highlight should have red color from theme override"
1491        );
1492        assert_ne!(
1493            styles_after_override[0].color, initial_color,
1494            "Color should have changed from initial"
1495        );
1496
1497        // Changing the override to a different color also restyles.
1498        let blue_color: Hsla = Rgba {
1499            r: 0.0,
1500            g: 0.0,
1501            b: 1.0,
1502            a: 1.0,
1503        }
1504        .into();
1505        cx.update(|_, cx| {
1506            SettingsStore::update_global(cx, |store, cx| {
1507                store.update_user_settings(cx, |settings| {
1508                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
1509                        syntax: IndexMap::from_iter([(
1510                            "function".to_string(),
1511                            HighlightStyleContent {
1512                                color: Some("#0000ff".to_string()),
1513                                background_color: None,
1514                                font_style: None,
1515                                font_weight: None,
1516                            },
1517                        )]),
1518                        ..ThemeStyleContent::default()
1519                    });
1520                });
1521            });
1522        });
1523
1524        cx.executor().advance_clock(Duration::from_millis(200));
1525        cx.run_until_parked();
1526
1527        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
1528        assert_eq!(styles_after_second_override.len(), 1);
1529        assert_eq!(
1530            styles_after_second_override[0].color,
1531            Some(blue_color),
1532            "Highlight should have blue color from updated theme override"
1533        );
1534
1535        // Removing overrides reverts to the original theme color.
1536        cx.update(|_, cx| {
1537            SettingsStore::update_global(cx, |store, cx| {
1538                store.update_user_settings(cx, |settings| {
1539                    settings.theme.experimental_theme_overrides = None;
1540                });
1541            });
1542        });
1543
1544        cx.executor().advance_clock(Duration::from_millis(200));
1545        cx.run_until_parked();
1546
1547        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
1548        assert_eq!(styles_after_clear.len(), 1);
1549        assert_eq!(
1550            styles_after_clear[0].color, initial_color,
1551            "Highlight should revert to initial color after clearing overrides"
1552        );
1553    }
1554
1555    #[gpui::test]
1556    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
1557        use collections::IndexMap;
1558        use gpui::{Hsla, Rgba, UpdateGlobal as _};
1559        use theme_settings::{HighlightStyleContent, ThemeStyleContent};
1560        use ui::ActiveTheme as _;
1561
1562        init_test(cx, |_| {});
1563
1564        update_test_language_settings(cx, &|language_settings| {
1565            language_settings.languages.0.insert(
1566                "Rust".into(),
1567                LanguageSettingsContent {
1568                    semantic_tokens: Some(SemanticTokens::Full),
1569                    ..LanguageSettingsContent::default()
1570                },
1571            );
1572        });
1573
1574        let mut cx = EditorLspTestContext::new_rust(
1575            lsp::ServerCapabilities {
1576                semantic_tokens_provider: Some(
1577                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1578                        lsp::SemanticTokensOptions {
1579                            legend: lsp::SemanticTokensLegend {
1580                                token_types: Vec::from(["function".into()]),
1581                                token_modifiers: Vec::new(),
1582                            },
1583                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1584                            ..lsp::SemanticTokensOptions::default()
1585                        },
1586                    ),
1587                ),
1588                ..lsp::ServerCapabilities::default()
1589            },
1590            cx,
1591        )
1592        .await;
1593
1594        let mut full_request = cx
1595            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1596                move |_, _, _| async move {
1597                    Ok(Some(lsp::SemanticTokensResult::Tokens(
1598                        lsp::SemanticTokens {
1599                            data: vec![
1600                                0, // delta_line
1601                                3, // delta_start
1602                                4, // length
1603                                0, // token_type (function)
1604                                0, // token_modifiers_bitset
1605                            ],
1606                            result_id: None,
1607                        },
1608                    )))
1609                },
1610            );
1611
1612        cx.set_state("ˇfn main() {}");
1613        full_request.next().await;
1614        cx.run_until_parked();
1615
1616        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
1617        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
1618        let initial_color = initial_styles[0].color;
1619
1620        // Per-theme overrides (theme_overrides keyed by theme name) also go through
1621        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
1622        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
1623        let green_color: Hsla = Rgba {
1624            r: 0.0,
1625            g: 1.0,
1626            b: 0.0,
1627            a: 1.0,
1628        }
1629        .into();
1630        cx.update(|_, cx| {
1631            SettingsStore::update_global(cx, |store, cx| {
1632                store.update_user_settings(cx, |settings| {
1633                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
1634                        theme_name.clone(),
1635                        ThemeStyleContent {
1636                            syntax: IndexMap::from_iter([(
1637                                "function".to_string(),
1638                                HighlightStyleContent {
1639                                    color: Some("#00ff00".to_string()),
1640                                    background_color: None,
1641                                    font_style: None,
1642                                    font_weight: None,
1643                                },
1644                            )]),
1645                            ..ThemeStyleContent::default()
1646                        },
1647                    )]);
1648                });
1649            });
1650        });
1651
1652        cx.executor().advance_clock(Duration::from_millis(200));
1653        cx.run_until_parked();
1654
1655        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
1656        assert_eq!(styles_after_override.len(), 1);
1657        assert_eq!(
1658            styles_after_override[0].color,
1659            Some(green_color),
1660            "Highlight should have green color from per-theme override"
1661        );
1662        assert_ne!(
1663            styles_after_override[0].color, initial_color,
1664            "Color should have changed from initial"
1665        );
1666    }
1667
1668    #[gpui::test]
1669    async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
1670        init_test(cx, |_| {});
1671
1672        update_test_language_settings(cx, &|language_settings| {
1673            language_settings.languages.0.insert(
1674                "Rust".into(),
1675                LanguageSettingsContent {
1676                    semantic_tokens: Some(SemanticTokens::Full),
1677                    ..LanguageSettingsContent::default()
1678                },
1679            );
1680        });
1681
1682        let mut cx = EditorLspTestContext::new_rust(
1683            lsp::ServerCapabilities {
1684                semantic_tokens_provider: Some(
1685                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1686                        lsp::SemanticTokensOptions {
1687                            legend: lsp::SemanticTokensLegend {
1688                                token_types: vec!["function".into()],
1689                                token_modifiers: Vec::new(),
1690                            },
1691                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1692                            ..lsp::SemanticTokensOptions::default()
1693                        },
1694                    ),
1695                ),
1696                ..lsp::ServerCapabilities::default()
1697            },
1698            cx,
1699        )
1700        .await;
1701
1702        let mut full_request = cx
1703            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1704                move |_, _, _| async move {
1705                    Ok(Some(lsp::SemanticTokensResult::Tokens(
1706                        lsp::SemanticTokens {
1707                            data: vec![
1708                                0, // delta_line
1709                                3, // delta_start
1710                                4, // length
1711                                0, // token_type
1712                                0, // token_modifiers_bitset
1713                            ],
1714                            result_id: None,
1715                        },
1716                    )))
1717                },
1718            );
1719
1720        cx.set_state("ˇfn main() {}");
1721        assert!(full_request.next().await.is_some());
1722        cx.run_until_parked();
1723
1724        assert_eq!(
1725            extract_semantic_highlights(&cx.editor, &cx),
1726            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
1727            "Semantic tokens should be present before stopping the server"
1728        );
1729
1730        cx.update_editor(|editor, _, cx| {
1731            let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
1732            editor.project.as_ref().unwrap().update(cx, |project, cx| {
1733                project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
1734            })
1735        });
1736        cx.executor().advance_clock(Duration::from_millis(200));
1737        cx.run_until_parked();
1738
1739        assert_eq!(
1740            extract_semantic_highlights(&cx.editor, &cx),
1741            Vec::new(),
1742            "Semantic tokens should be cleared after stopping the server"
1743        );
1744    }
1745
1746    #[gpui::test]
1747    async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
1748        init_test(cx, |_| {});
1749
1750        update_test_language_settings(cx, &|language_settings| {
1751            language_settings.languages.0.insert(
1752                "Rust".into(),
1753                LanguageSettingsContent {
1754                    semantic_tokens: Some(SemanticTokens::Full),
1755                    ..LanguageSettingsContent::default()
1756                },
1757            );
1758        });
1759
1760        let mut cx = EditorLspTestContext::new_rust(
1761            lsp::ServerCapabilities {
1762                semantic_tokens_provider: Some(
1763                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1764                        lsp::SemanticTokensOptions {
1765                            legend: lsp::SemanticTokensLegend {
1766                                token_types: vec!["function".into()],
1767                                token_modifiers: Vec::new(),
1768                            },
1769                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1770                            ..lsp::SemanticTokensOptions::default()
1771                        },
1772                    ),
1773                ),
1774                ..lsp::ServerCapabilities::default()
1775            },
1776            cx,
1777        )
1778        .await;
1779
1780        let mut full_request = cx
1781            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1782                move |_, _, _| async move {
1783                    Ok(Some(lsp::SemanticTokensResult::Tokens(
1784                        lsp::SemanticTokens {
1785                            data: vec![
1786                                0, // delta_line
1787                                3, // delta_start
1788                                4, // length
1789                                0, // token_type
1790                                0, // token_modifiers_bitset
1791                            ],
1792                            result_id: None,
1793                        },
1794                    )))
1795                },
1796            );
1797
1798        cx.set_state("ˇfn main() {}");
1799        assert!(full_request.next().await.is_some());
1800        cx.run_until_parked();
1801
1802        assert_eq!(
1803            extract_semantic_highlights(&cx.editor, &cx),
1804            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
1805            "Semantic tokens should be present before disabling the setting"
1806        );
1807
1808        update_test_language_settings(&mut cx, &|language_settings| {
1809            language_settings.languages.0.insert(
1810                "Rust".into(),
1811                LanguageSettingsContent {
1812                    semantic_tokens: Some(SemanticTokens::Off),
1813                    ..LanguageSettingsContent::default()
1814                },
1815            );
1816        });
1817        cx.executor().advance_clock(Duration::from_millis(200));
1818        cx.run_until_parked();
1819
1820        assert_eq!(
1821            extract_semantic_highlights(&cx.editor, &cx),
1822            Vec::new(),
1823            "Semantic tokens should be cleared after disabling the setting"
1824        );
1825    }
1826
1827    fn extract_semantic_highlight_styles(
1828        editor: &Entity<Editor>,
1829        cx: &TestAppContext,
1830    ) -> Vec<HighlightStyle> {
1831        editor.read_with(cx, |editor, cx| {
1832            editor
1833                .display_map
1834                .read(cx)
1835                .semantic_token_highlights
1836                .iter()
1837                .flat_map(|(_, (v, interner))| {
1838                    v.iter().map(|highlights| interner[highlights.style])
1839                })
1840                .collect()
1841        })
1842    }
1843}