semantic_tokens.rs

   1use std::{collections::hash_map, sync::Arc, time::Duration};
   2
   3use collections::{HashMap, HashSet};
   4use futures::future::join_all;
   5use gpui::{
   6    App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
   7};
   8use itertools::Itertools;
   9use language::language_settings::LanguageSettings;
  10use project::{
  11    lsp_store::{
  12        BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
  13        TokenType,
  14    },
  15    project_settings::ProjectSettings,
  16};
  17use settings::{
  18    SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight, SemanticTokenRule,
  19    SemanticTokenRules, Settings as _,
  20};
  21use text::BufferId;
  22use theme::SyntaxTheme;
  23use ui::ActiveTheme as _;
  24
  25use crate::{
  26    Editor,
  27    actions::ToggleSemanticHighlights,
  28    display_map::{HighlightStyleInterner, SemanticTokenHighlight},
  29};
  30
/// Per-editor state for LSP semantic-token highlighting.
pub(super) struct SemanticTokenState {
    // Stylization rules cached from the global LSP settings; compared against
    // incoming settings in `update_rules` to detect changes.
    rules: SemanticTokenRules,
    // Whether semantic highlighting is currently enabled for this editor.
    enabled: bool,
    // The in-flight (debounced) token-refresh task; replaced wholesale on
    // every call to `Editor::refresh_semantic_tokens`.
    update_task: Task<()>,
    // For each buffer: the buffer version at the time tokens were last
    // fetched, used to skip re-querying buffers that have not changed since.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
  37
  38impl SemanticTokenState {
  39    pub(super) fn new(cx: &App, enabled: bool) -> Self {
  40        Self {
  41            rules: ProjectSettings::get_global(cx)
  42                .global_lsp_settings
  43                .semantic_token_rules
  44                .clone(),
  45            enabled,
  46            update_task: Task::ready(()),
  47            fetched_for_buffers: HashMap::default(),
  48        }
  49    }
  50
  51    pub(super) fn enabled(&self) -> bool {
  52        self.enabled
  53    }
  54
  55    pub(super) fn toggle_enabled(&mut self) {
  56        self.enabled = !self.enabled;
  57    }
  58
  59    #[cfg(test)]
  60    pub(super) fn take_update_task(&mut self) -> Task<()> {
  61        std::mem::replace(&mut self.update_task, Task::ready(()))
  62    }
  63
  64    pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
  65        self.fetched_for_buffers.remove(buffer_id);
  66    }
  67
  68    pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
  69        if new_rules != self.rules {
  70            self.rules = new_rules;
  71            true
  72        } else {
  73            false
  74        }
  75    }
  76}
  77
impl Editor {
    /// Returns `true` if any buffer in this editor's multi-buffer has a
    /// semantics provider that supports semantic tokens.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        // OR together support across all buffers: one capable buffer is enough.
        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(&mut |buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlighting is currently enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: toggles semantic highlighting, then invalidates all
    /// cached token versions and kicks off a refresh (which, when toggled
    /// off, clears the existing highlights).
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.invalidate_semantic_tokens(None);
        self.refresh_semantic_tokens(None, None, cx);
    }

    /// Drops the cached "fetched at version" bookkeeping for one buffer, or
    /// for every buffer when `for_buffer` is `None`.
    pub(super) fn invalidate_semantic_tokens(&mut self, for_buffer: Option<BufferId>) {
        match for_buffer {
            Some(for_buffer) => self.semantic_token_state.invalidate_buffer(&for_buffer),
            None => self.semantic_token_state.fetched_for_buffers.clear(),
        }
    }

    /// Re-fetches semantic tokens (debounced) and rebuilds the display map's
    /// semantic highlights.
    ///
    /// - `buffer_id`: an extra buffer to query in addition to the visible ones.
    /// - `for_server`: when `Some`, this refresh was requested by a specific
    ///   language server; all previously-fetched versions are dropped and the
    ///   corresponding highlights invalidated before re-querying.
    pub(super) fn refresh_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        // Disabled (either LSP data globally or semantic highlights for this
        // editor): clear everything and bail.
        if !self.lsp_data_enabled() || !self.semantic_token_state.enabled() {
            self.invalidate_semantic_tokens(None);
            self.display_map.update(cx, |display_map, _| {
                // Clear in place when we hold the only Arc reference;
                // otherwise swap in a fresh empty map.
                match Arc::get_mut(&mut display_map.semantic_token_highlights) {
                    Some(highlights) => highlights.clear(),
                    None => display_map.semantic_token_highlights = Arc::new(Default::default()),
                };
            });
            // Cancel any in-flight refresh by replacing its task.
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-initiated refresh invalidates every buffer we had fetched:
        // drain the version cache now, and remember the buffer ids so their
        // highlights are dropped once the new query round starts applying.
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Candidate buffers: LSP-relevant visible buffers, plus the explicitly
        // requested one, restricted to registered buffers whose language
        // settings enable semantic tokens.
        let buffers_to_query = self
            .visible_buffers(cx)
            .into_iter()
            .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && LanguageSettings::for_buffer(editor_buffer.read(cx), cx)
                        .semantic_tokens
                        .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Drop highlights for buffers that still have them but are no longer
        // being queried AND no longer have semantic tokens enabled in settings.
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .keys()
            .copied()
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        LanguageSettings::for_buffer(&buffer, cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        // Replacing `update_task` cancels the previous refresh, so rapid
        // calls coalesce behind the 50ms debounce below.
        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            // Build one fetch future per buffer, skipping buffers whose
            // version has not changed since the last successful fetch.
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                sema.semantic_tokens(buffer, for_server, cx).map(
                                    |task| async move { (buffer_id, query_version, task.await) },
                                )
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            // Await every per-buffer fetch concurrently.
            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // Apply the deferred server-refresh invalidation now that
                    // fresh results are about to land.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                            editor.semantic_token_state.invalidate_buffer(&buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // `tokens: None` is an explicit "no tokens" answer:
                            // drop this buffer's existing highlights.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            // Fetch errors keep the stale highlights in place.
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                    {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Record the version this result was fetched at; if a
                        // newer result already landed, skip this stale one.
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        // Language name feeds per-language stylizer selection.
                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                // Merge highlights across all servers that
                                // produced tokens for this buffer.
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    let theme = cx.theme().syntax();
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        theme,
                                    ));
                                }

                                // Keep highlights sorted by anchor position.
                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                // Copy-on-write insert so readers holding the
                                // old Arc keep a consistent snapshot.
                                Arc::make_mut(&mut display_map.semantic_token_highlights).insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }
}
 326
 327fn buffer_into_editor_highlights<'a, 'b>(
 328    buffer_tokens: &'a [BufferSemanticToken],
 329    stylizer: &'a SemanticTokenStylizer,
 330    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
 331    interner: &'b mut HighlightStyleInterner,
 332    theme: &'a SyntaxTheme,
 333) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
 334    multi_buffer_snapshot
 335        .text_anchors_to_visible_anchors(
 336            buffer_tokens
 337                .iter()
 338                .flat_map(|token| [token.range.start, token.range.end]),
 339        )
 340        .into_iter()
 341        .tuples::<(_, _)>()
 342        .zip(buffer_tokens)
 343        .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
 344            let range = multi_buffer_start?..multi_buffer_end?;
 345            let style = convert_token(stylizer, theme, token.token_type, token.token_modifiers)?;
 346            let style = interner.intern(style);
 347            Some(SemanticTokenHighlight {
 348                range,
 349                style,
 350                token_type: token.token_type,
 351                token_modifiers: token.token_modifiers,
 352                server_id: stylizer.server_id(),
 353            })
 354        })
 355}
 356
/// Resolves the highlight style for one semantic token by folding together
/// every rule that matches the token's type and modifier set.
///
/// Returns `None` when no rules exist for the token type, or when the
/// last matching rule explicitly defines no style (an opt-out).
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    let rules = stylizer.rules_for_token(token_type)?;
    // A rule applies only if ALL of its required modifiers are present on
    // the token's modifier bitset.
    let filter = |rule: &&SemanticTokenRule| {
        rule.token_modifiers
            .iter()
            .all(|m| stylizer.has_modifier(modifiers, m))
    };
    // Later rules win; if the final applicable rule says "no style", the
    // token is deliberately left unhighlighted.
    let last = rules.last()?;
    if last.no_style_defined() && filter(&last) {
        return None;
    }

    let mut highlight = HighlightStyle::default();

    // Fold matching rules in order: each field takes the rule's explicit
    // value, else the rule's named theme style, else the value accumulated
    // from earlier rules.
    for rule in rules.into_iter().filter(filter) {
        // First theme style whose name the rule references, if any.
        let style = rule
            .style
            .iter()
            .find_map(|style| theme.style_for_name(style));

        // overwrite!(highlight.F, SemanticTokenRule::R, f) sets
        // highlight.F = rule.R.map(f) | style.F | highlight.F (first Some wins).
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.as_ref().and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        // NOTE: the invocation order matters — `color` is resolved first
        // because the underline/strikethrough transforms below read
        // `highlight.color` for `InheritForeground`.
        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }
    Some(highlight)
}
 453
 454#[cfg(test)]
 455mod tests {
 456    use std::{
 457        ops::Range,
 458        sync::atomic::{self, AtomicUsize},
 459    };
 460
 461    use futures::StreamExt as _;
 462    use gpui::{
 463        AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, UpdateGlobal as _,
 464    };
 465    use language::{
 466        Diagnostic, DiagnosticEntry, DiagnosticSet, Language, LanguageAwareStyling, LanguageConfig,
 467        LanguageMatcher,
 468    };
 469    use languages::FakeLspAdapter;
 470    use lsp::LanguageServerId;
 471    use multi_buffer::{
 472        AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
 473    };
 474    use project::Project;
 475    use rope::{Point, PointUtf16};
 476    use serde_json::json;
 477    use settings::{
 478        GlobalLspSettingsContent, LanguageSettingsContent, SemanticTokenRule, SemanticTokenRules,
 479        SemanticTokens, SettingsStore,
 480    };
 481    use workspace::{MultiWorkspace, WorkspaceHandle as _};
 482
 483    use crate::{
 484        Capability,
 485        editor_tests::{init_test, update_test_language_settings},
 486        test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
 487    };
 488
 489    use super::*;
 490
    // A server that advertises `full` (with no delta support at runtime)
    // should receive a full request per edit, and the client must not fall
    // back to delta requests even though the server returns a result ID.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // `delta: None` — server cannot serve deltas.
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Counts how many full requests the fake server receives.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // One token: "main" at line 0, col 3, length 4.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // A second edit must trigger another FULL request, not a delta.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 568
    // Even when the server advertises delta support, responding with
    // `result_id: None` must force the client to keep issuing full requests
    // (there is no previous result ID to base a delta on).
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // Server claims delta capability here.
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Await the debounced refresh task so highlights are applied.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        // Both requests were full requests — no deltas attempted.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 644
    // Happy-path delta flow: the first fetch is a full request (returning
    // result ID "a"); the next edit must produce a delta request referencing
    // "a", with no additional full request.
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Separate counters for full vs. delta requests.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    // The delta must reference the previously returned ID.
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                // Empty edit list: tokens are unchanged.
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Exactly one full and one delta request were issued.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
 740
    // Verifies that when TWO language servers are registered for the same language,
    // semantic tokens from both servers are fetched and merged into a single
    // highlight set for the buffer: server "toml1" highlights the key and server
    // "toml2" highlights the value. Also asserts each server receives exactly one
    // full-tokens request for the single triggering edit.
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for TOML buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test.
        // Each legend has a single token type, so token_type index 0 in a server's
        // response resolves to "property" (server 1) or "number" (server 2).
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        // Per-server request counters so we can assert exactly one full fetch each.
        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        // Server 1 ("toml1"): responds to full requests with a single token.
        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight the 1-char key at line 0, column 0
                                                // as a "property" (legend 1, token_type 0)
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        // Server 2 ("toml2"): same shape, but highlights a different span under
        // a different legend, so the merged result must contain both tokens.
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight the 1-char value '1' at line 0,
                                                // column 4 as a "number" (legend 2, token_type 0)
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        // Line 0 is "a = 1": key 'a' at offset 0, value '1' at offset 4.
        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait for both fake servers to come up before editing.
        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        // Replacing 'a' with 'b' makes the buffer "b = 1\nb = 2\n" and kicks off
        // the (debounced) token update; the 200ms clock advance fires the debounce.
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // Merged highlights: 0..1 is the key from server 1, 4..5 is the value
        // from server 2 — both servers contribute to the same buffer.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        // Exactly one full request per server for the single edit.
        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
 960
    // Verifies semantic-token handling for a multibuffer that shows only PART of a
    // buffer: tokens outside the visible excerpt must not be highlighted, and
    // expanding the excerpt must re-fetch/re-apply tokens for the newly visible
    // lines. A Rust server is also registered (with no token handler) so the test
    // runs with a second, unrelated excerpt in the same multibuffer.
    #[gpui::test]
    async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for both languages in play.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));
        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // Single-entry legends: token_type index 0 maps to the sole type name.
        let toml_legend = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let rust_legend = lsp::SemanticTokensLegend {
            token_types: vec!["constant".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        // Counts the TOML server's full-token requests.
        let full_counter_toml = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_clone = full_counter_toml.clone();

        let mut toml_server = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_clone = full_counter_toml_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
                                                data: vec![
                                                    0, // delta_line (line 0)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 1)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 2)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());
        // The Rust server advertises token support but installs no request
        // handler; the test never asserts Rust highlights.
        let mut rust_server = language_registry.register_fake_lsp(
            rust_language.name(),
            FakeLspAdapter {
                name: "rust",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: rust_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(rust_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\nc = 3\n",
                        "bar.rs": "const c: usize = 3;\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        // NOTE(review): assumes file_project_paths is sorted so [0] is
        // "dir/bar.rs" and [1] is "dir/foo.toml" — confirm if paths change.
        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
        let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let (toml_item, rust_item) = workspace.update_in(cx, |workspace, window, cx| {
            (
                workspace.open_path(toml_file, None, true, window, cx),
                workspace.open_path(rust_file, None, true, window, cx),
            )
        });
        let toml_item = toml_item.await.expect("Could not open test file");
        let rust_item = rust_item.await.expect("Could not open test file");

        let (toml_editor, rust_editor) = cx.update(|_, cx| {
            (
                toml_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
                rust_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
            )
        });
        let toml_buffer = cx.read(|cx| {
            toml_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        let rust_buffer = cx.read(|cx| {
            rust_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        // Both excerpts initially cover only line 0, columns 0-4 of their buffers.
        let multibuffer = cx.new(|cx| {
            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
            multibuffer.set_excerpts_for_path(
                PathKey::sorted(0),
                toml_buffer.clone(),
                [Point::new(0, 0)..Point::new(0, 4)],
                0,
                cx,
            );
            multibuffer.set_excerpts_for_path(
                PathKey::sorted(1),
                rust_buffer.clone(),
                [Point::new(0, 0)..Point::new(0, 4)],
                0,
                cx,
            );
            multibuffer
        });

        let editor = workspace.update_in(cx, |workspace, window, cx| {
            let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
            workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
            editor
        });
        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        let _toml_server = toml_server.next().await.unwrap();
        let _rust_server = rust_server.next().await.unwrap();

        // Initial request.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
        cx.run_until_parked();

        // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
        // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
        // The server's line-1/line-2 tokens fall outside the excerpt and are dropped.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
        );

        // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
        let toml_anchor = editor.read_with(cx, |editor, cx| {
            editor
                .buffer()
                .read(cx)
                .snapshot(cx)
                .anchor_in_excerpt(text::Anchor::min_for_buffer(
                    toml_buffer.read(cx).remote_id(),
                ))
                .unwrap()
        });
        editor.update_in(cx, |editor, _, cx| {
            editor.buffer().update(cx, |buffer, cx| {
                buffer.expand_excerpts([toml_anchor], 2, ExpandExcerptDirection::Down, cx);
            });
        });

        // Wait for semantic tokens to be re-fetched after expansion.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
        // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
        // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(6)..MultiBufferOffset(7),
                MultiBufferOffset(12)..MultiBufferOffset(13),
            ]
        );
    }
1256
    // Regression test: a singleton editor opened from a multibuffer (via
    // OpenExcerpts) must still fetch semantic tokens for a buffer no editor has
    // fetched them for before. Reproduces a race where the autoscroll performed
    // on open replaces the pending post-scroll update and could skip the LSP
    // data refresh — see the foo_content comment below for the exact sequence.
    #[gpui::test]
    async fn lsp_semantic_tokens_singleton_opened_from_multibuffer(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // Single-entry legend: token_type index 0 resolves to "function".
        let rust_legend = lsp::SemanticTokensLegend {
            token_types: vec!["function".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);
        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        let mut rust_server = language_registry.register_fake_lsp(
            rust_language.name(),
            FakeLspAdapter {
                name: "rust",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: rust_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new(move |fake_server| {
                    fake_server
                        .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                            move |_, _| async move {
                                Ok(Some(lsp::SemanticTokensResult::Tokens(
                                    lsp::SemanticTokens {
                                        // One token: line 0, column 3, length 4
                                        // (the function name, e.g. "test"/"main"),
                                        // token_type 0 => "function".
                                        data: vec![0, 3, 4, 0, 0],
                                        result_id: None,
                                    },
                                )))
                            },
                        );
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(rust_language.clone());

        // foo.rs must be long enough that autoscroll triggers an actual scroll
        // position change when opening from the multibuffer with cursor near
        // the end. This reproduces the race: set_visible_line_count spawns a
        // task, then autoscroll fires ScrollPositionChanged whose handler
        // replaces post_scroll_update with a debounced task that skips
        // update_lsp_data for singletons.
        let mut foo_content = String::from("fn test() {}\n");
        for i in 0..100 {
            foo_content.push_str(&format!("fn func_{i}() {{}}\n"));
        }

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "bar.rs": "fn main() {}\n",
                    "foo.rs": foo_content,
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        // Open bar.rs as an editor to start the LSP server.
        let bar_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let bar_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(bar_file, None, true, window, cx)
            })
            .await
            .expect("Could not open bar.rs");
        let bar_editor = cx.update(|_, cx| {
            bar_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });
        let bar_buffer = cx.read(|cx| {
            bar_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });

        let _rust_server = rust_server.next().await.unwrap();

        // Sanity check: the normal singleton-open path produces highlights,
        // so a later empty result can only mean the race under test.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = bar_editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        cx.run_until_parked();

        assert!(
            !extract_semantic_highlights(&bar_editor, &cx).is_empty(),
            "bar.rs should have semantic tokens after initial open"
        );

        // Get foo.rs buffer directly from the project. No editor has ever
        // fetched semantic tokens for this buffer.
        let foo_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
        let foo_buffer = project
            .update(cx, |project, cx| project.open_buffer(foo_file, cx))
            .await
            .expect("Could not open foo.rs buffer");

        // Build a multibuffer with both files. The foo.rs excerpt covers a
        // range near the end of the file so that opening the singleton will
        // autoscroll to a position that requires changing scroll_position.
        let multibuffer = cx.new(|cx| {
            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
            multibuffer.set_excerpts_for_path(
                PathKey::sorted(0),
                bar_buffer.clone(),
                [Point::new(0, 0)..Point::new(0, 12)],
                0,
                cx,
            );
            multibuffer.set_excerpts_for_path(
                PathKey::sorted(1),
                foo_buffer.clone(),
                [Point::new(95, 0)..Point::new(100, 0)],
                0,
                cx,
            );
            multibuffer
        });

        let mb_editor = workspace.update_in(cx, |workspace, window, cx| {
            let editor =
                cx.new(|cx| build_editor_with_project(project.clone(), multibuffer, window, cx));
            workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
            editor
        });
        mb_editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Close bar.rs tab so only the multibuffer remains.
        workspace
            .update_in(cx, |workspace, window, cx| {
                let pane = workspace.active_pane().clone();
                pane.update(cx, |pane, cx| {
                    pane.close_item_by_id(
                        bar_editor.entity_id(),
                        workspace::SaveIntent::Skip,
                        window,
                        cx,
                    )
                })
            })
            .await
            .ok();

        cx.run_until_parked();

        // Position cursor in the foo.rs excerpt (near line 95+).
        mb_editor.update_in(cx, |editor, window, cx| {
            let snapshot = editor.display_snapshot(cx);
            let end = snapshot.buffer_snapshot().len();
            editor.change_selections(None.into(), window, cx, |s| {
                s.select_ranges([end..end]);
            });
        });

        // Open the singleton from the multibuffer. open_buffers_in_workspace
        // creates the editor and calls change_selections with autoscroll.
        // During render, set_visible_line_count fires first (spawning a task),
        // then autoscroll_vertically scrolls to line ~95 which emits
        // ScrollPositionChanged, whose handler replaces post_scroll_update.
        mb_editor.update_in(cx, |editor, window, cx| {
            editor.open_excerpts(&crate::actions::OpenExcerpts, window, cx);
        });

        cx.run_until_parked();
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let active_editor = workspace.read_with(cx, |workspace, cx| {
            workspace
                .active_item(cx)
                .and_then(|item| item.act_as::<Editor>(cx))
                .expect("Active item should be an editor")
        });

        assert!(
            active_editor.read_with(cx, |editor, cx| editor.buffer().read(cx).is_singleton()),
            "Active editor should be a singleton buffer"
        );

        // Wait for semantic tokens on the singleton.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = active_editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        cx.run_until_parked();

        // The fix under test: the newly opened singleton must have fetched
        // tokens despite the autoscroll replacing the post-scroll update.
        let highlights = extract_semantic_highlights(&active_editor, &cx);
        assert!(
            !highlights.is_empty(),
            "Singleton editor opened from multibuffer should have semantic tokens"
        );
    }
1513
1514    fn extract_semantic_highlights(
1515        editor: &Entity<Editor>,
1516        cx: &TestAppContext,
1517    ) -> Vec<Range<MultiBufferOffset>> {
1518        editor.read_with(cx, |editor, cx| {
1519            let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1520            editor
1521                .display_map
1522                .read(cx)
1523                .semantic_token_highlights
1524                .iter()
1525                .flat_map(|(_, (v, _))| v.iter())
1526                .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1527                .collect()
1528        })
1529    }
1530
1531    #[gpui::test]
1532    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
1533        use gpui::{Hsla, Rgba, UpdateGlobal as _};
1534        use settings::{GlobalLspSettingsContent, SemanticTokenRule};
1535
1536        init_test(cx, |_| {});
1537
1538        update_test_language_settings(cx, &|language_settings| {
1539            language_settings.languages.0.insert(
1540                "Rust".into(),
1541                LanguageSettingsContent {
1542                    semantic_tokens: Some(SemanticTokens::Full),
1543                    ..LanguageSettingsContent::default()
1544                },
1545            );
1546        });
1547
1548        let mut cx = EditorLspTestContext::new_rust(
1549            lsp::ServerCapabilities {
1550                semantic_tokens_provider: Some(
1551                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1552                        lsp::SemanticTokensOptions {
1553                            legend: lsp::SemanticTokensLegend {
1554                                token_types: Vec::from(["function".into()]),
1555                                token_modifiers: Vec::new(),
1556                            },
1557                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1558                            ..lsp::SemanticTokensOptions::default()
1559                        },
1560                    ),
1561                ),
1562                ..lsp::ServerCapabilities::default()
1563            },
1564            cx,
1565        )
1566        .await;
1567
1568        let mut full_request = cx
1569            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1570                move |_, _, _| {
1571                    async move {
1572                        Ok(Some(lsp::SemanticTokensResult::Tokens(
1573                            lsp::SemanticTokens {
1574                                data: vec![
1575                                    0, // delta_line
1576                                    3, // delta_start
1577                                    4, // length
1578                                    0, // token_type (function)
1579                                    0, // token_modifiers_bitset
1580                                ],
1581                                result_id: None,
1582                            },
1583                        )))
1584                    }
1585                },
1586            );
1587
1588        // Trigger initial semantic tokens fetch
1589        cx.set_state("ˇfn main() {}");
1590        full_request.next().await;
1591        cx.run_until_parked();
1592
1593        // Verify initial highlights exist (with no custom color yet)
1594        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
1595        assert_eq!(
1596            initial_ranges,
1597            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
1598            "Should have initial semantic token highlights"
1599        );
1600        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
1601        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
1602        // Initial color should be None or theme default (not red or blue)
1603        let initial_color = initial_styles[0].color;
1604
1605        // Set a custom foreground color for function tokens via settings.json
1606        let red_color = Rgba {
1607            r: 1.0,
1608            g: 0.0,
1609            b: 0.0,
1610            a: 1.0,
1611        };
1612        cx.update(|_, cx| {
1613            SettingsStore::update_global(cx, |store, cx| {
1614                store.update_user_settings(cx, |settings| {
1615                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
1616                        semantic_token_rules: Some(SemanticTokenRules {
1617                            rules: Vec::from([SemanticTokenRule {
1618                                token_type: Some("function".to_string()),
1619                                foreground_color: Some(red_color),
1620                                ..SemanticTokenRule::default()
1621                            }]),
1622                        }),
1623                        ..GlobalLspSettingsContent::default()
1624                    });
1625                });
1626            });
1627        });
1628
1629        // Trigger a refetch by making an edit (which forces semantic tokens update)
1630        cx.set_state("ˇfn main() { }");
1631        full_request.next().await;
1632        cx.run_until_parked();
1633
1634        // Verify the highlights now have the custom red color
1635        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
1636        assert_eq!(
1637            styles_after_settings_change.len(),
1638            1,
1639            "Should still have one highlight"
1640        );
1641        assert_eq!(
1642            styles_after_settings_change[0].color,
1643            Some(Hsla::from(red_color)),
1644            "Highlight should have the custom red color from settings.json"
1645        );
1646        assert_ne!(
1647            styles_after_settings_change[0].color, initial_color,
1648            "Color should have changed from initial"
1649        );
1650    }
1651
    /// Verifies that `theme.experimental_theme_overrides` restyles existing
    /// semantic token highlights without a token refetch: applying an override
    /// recolors them, changing the override recolors again, and clearing it
    /// reverts to the original theme color.
    #[gpui::test]
    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme_settings::{HighlightStyleContent, ThemeStyleContent};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertising a single "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Answer every full-tokens request with one "function" token covering
        // `main` (columns 3..7 on line 0).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial token fetch and capture the default style.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Changing experimental_theme_overrides triggers GlobalTheme reload,
        // which fires theme_changed → refresh_semantic_token_highlights.
        let red_color: Hsla = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#ff0000".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        // Let the debounced refresh run.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(red_color),
            "Highlight should have red color from theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );

        // Changing the override to a different color also restyles.
        let blue_color: Hsla = Rgba {
            r: 0.0,
            g: 0.0,
            b: 1.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#0000ff".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_second_override.len(), 1);
        assert_eq!(
            styles_after_second_override[0].color,
            Some(blue_color),
            "Highlight should have blue color from updated theme override"
        );

        // Removing overrides reverts to the original theme color.
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = None;
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_clear.len(), 1);
        assert_eq!(
            styles_after_clear[0].color, initial_color,
            "Highlight should revert to initial color after clearing overrides"
        );
    }
1816
    /// Verifies that per-theme overrides (`theme.theme_overrides`, keyed by
    /// theme name) restyle existing semantic token highlights, just like the
    /// experimental global overrides do.
    #[gpui::test]
    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme_settings::{HighlightStyleContent, ThemeStyleContent};
        use ui::ActiveTheme as _;

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertising a single "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Answer every full-tokens request with one "function" token covering
        // `main` (columns 3..7 on line 0).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial token fetch and capture the default style.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Per-theme overrides (theme_overrides keyed by theme name) also go through
        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
        let green_color: Hsla = Rgba {
            r: 0.0,
            g: 1.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
                        theme_name.clone(),
                        ThemeStyleContent {
                            syntax: IndexMap::from_iter([(
                                "function".to_string(),
                                HighlightStyleContent {
                                    color: Some("#00ff00".to_string()),
                                    background_color: None,
                                    font_style: None,
                                    font_weight: None,
                                },
                            )]),
                            ..ThemeStyleContent::default()
                        },
                    )]);
                });
            });
        });

        // Let the debounced refresh run.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(green_color),
            "Highlight should have green color from per-theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1929
    /// Verifies that stopping the language servers for a buffer removes the
    /// semantic token highlights that the stopped server had produced.
    #[gpui::test]
    async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertising a single "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Answer every full-tokens request with one "function" token covering
        // columns 3..7 on line 0.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before stopping the server"
        );

        // Stop every language server attached to the editor's buffers.
        cx.update_editor(|editor, _, cx| {
            let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
            editor.project.as_ref().unwrap().update(cx, |project, cx| {
                project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
            })
        });
        // Give the editor time to react to the servers stopping.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after stopping the server"
        );
    }
2007
    /// Verifies that switching the `semantic_tokens` language setting from
    /// `Full` to `Off` clears existing highlights without requiring an edit.
    #[gpui::test]
    async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertising a single "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Answer every full-tokens request with one "function" token covering
        // columns 3..7 on line 0.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before disabling the setting"
        );

        // Flip the setting to Off; the settings observer should clear the
        // highlights without any buffer edit.
        update_test_language_settings(&mut cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Off),
                    ..LanguageSettingsContent::default()
                },
            );
        });
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after disabling the setting"
        );
    }
2088
2089    #[gpui::test]
2090    async fn test_semantic_token_disabling_with_empty_rule(cx: &mut TestAppContext) {
2091        init_test(cx, |_| {});
2092        update_test_language_settings(cx, &|s| {
2093            s.languages.0.insert(
2094                "Rust".into(),
2095                LanguageSettingsContent {
2096                    semantic_tokens: Some(SemanticTokens::Full),
2097                    ..Default::default()
2098                },
2099            );
2100        });
2101
2102        let mut cx = EditorLspTestContext::new_rust(
2103            lsp::ServerCapabilities {
2104                semantic_tokens_provider: Some(
2105                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
2106                        lsp::SemanticTokensOptions {
2107                            legend: lsp::SemanticTokensLegend {
2108                                token_types: vec!["function".into()],
2109                                token_modifiers: vec![],
2110                            },
2111                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
2112                            ..Default::default()
2113                        },
2114                    ),
2115                ),
2116                ..Default::default()
2117            },
2118            cx,
2119        )
2120        .await;
2121
2122        let mut full_request = cx
2123            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
2124                move |_, _, _| async move {
2125                    Ok(Some(lsp::SemanticTokensResult::Tokens(
2126                        lsp::SemanticTokens {
2127                            data: vec![0, 3, 4, 0, 0],
2128                            result_id: None,
2129                        },
2130                    )))
2131                },
2132            );
2133
2134        // Verify it highlights by default
2135        cx.set_state("ˇfn main() {}");
2136        full_request.next().await;
2137        cx.run_until_parked();
2138        assert_eq!(extract_semantic_highlights(&cx.editor, &cx).len(), 1);
2139
2140        // Apply EMPTY rule to disable it
2141        cx.update(|_, cx| {
2142            SettingsStore::update_global(cx, |store, cx| {
2143                store.update_user_settings(cx, |settings| {
2144                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
2145                        semantic_token_rules: Some(SemanticTokenRules {
2146                            rules: vec![SemanticTokenRule {
2147                                token_type: Some("function".to_string()),
2148                                ..Default::default()
2149                            }],
2150                        }),
2151                        ..Default::default()
2152                    });
2153                });
2154            });
2155        });
2156
2157        cx.set_state("ˇfn main() { }");
2158        full_request.next().await;
2159        cx.run_until_parked();
2160
2161        assert!(
2162            extract_semantic_highlights(&cx.editor, &cx).is_empty(),
2163            "Highlighting should be disabled by empty style setting"
2164        );
2165    }
2166
    /// Verifies that an empty rule matching only the token type ("comment",
    /// no modifiers) also disables more specific tokens of that type, such as
    /// a comment carrying the "documentation" modifier.
    #[gpui::test]
    async fn test_semantic_token_broad_rule_disables_specific_token(cx: &mut TestAppContext) {
        init_test(cx, |_| {});
        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|s| {
            s.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..Default::default()
                },
            );
        });

        // Fake server advertising "comment" tokens with a "documentation"
        // modifier.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["comment".into()],
                                token_modifiers: vec!["documentation".into()],
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..Default::default()
                        },
                    ),
                ),
                ..Default::default()
            },
            cx,
        )
        .await;

        // Answer every full-tokens request with one 5-column token at the
        // start of line 0, with the "documentation" modifier bit set.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![0, 0, 5, 0, 1], // comment [documentation]
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇ/// d\n");
        full_request.next().await;
        cx.run_until_parked();
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx).len(),
            1,
            "Documentation comment should be highlighted"
        );

        // Apply a BROAD empty rule for "comment" (no modifiers)
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: vec![SemanticTokenRule {
                                token_type: Some("comment".to_string()),
                                ..Default::default()
                            }],
                        }),
                        ..Default::default()
                    });
                });
            });
        });

        // Edit to force a refetch under the new rules.
        cx.set_state("ˇ/// d\n");
        full_request.next().await;
        cx.run_until_parked();

        assert!(
            extract_semantic_highlights(&cx.editor, &cx).is_empty(),
            "Broad empty rule should disable specific documentation comment"
        );
    }
2247
    #[gpui::test]
    async fn test_semantic_token_specific_rule_does_not_disable_broad_token(
        cx: &mut TestAppContext,
    ) {
        use gpui::UpdateGlobal as _;
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});
        // Enable full semantic-token support for Rust so the editor issues token requests.
        update_test_language_settings(cx, &|s| {
            s.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..Default::default()
                },
            );
        });

        // Fake language server advertising "comment" tokens with a "documentation"
        // modifier in its legend.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["comment".into()],
                                token_modifiers: vec!["documentation".into()],
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..Default::default()
                        },
                    ),
                ),
                ..Default::default()
            },
            cx,
        )
        .await;

        // Two tokens per response: a documentation comment on line 0 and a plain
        // comment (empty modifier bitset) on line 1.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, 0, 5, 0, 1, // comment [documentation]
                                1, 0, 5, 0, 0, // normal comment
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Sanity check: with no user rules, both tokens are highlighted.
        cx.set_state("ˇ/// d\n// n\n");
        full_request.next().await;
        cx.run_until_parked();
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx).len(),
            2,
            "Both documentation and normal comments should be highlighted initially"
        );

        // Apply a SPECIFIC empty rule for documentation only
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: vec![SemanticTokenRule {
                                token_type: Some("comment".to_string()),
                                token_modifiers: vec!["documentation".to_string()],
                                ..Default::default()
                            }],
                        }),
                        ..Default::default()
                    });
                });
            });
        });

        // Re-set the buffer content to provoke a fresh token request under the new rules.
        cx.set_state("ˇ/// d\n// n\n");
        full_request.next().await;
        cx.run_until_parked();

        // Only the documentation comment is suppressed; the modifier-less comment
        // does not match the more specific rule and keeps its highlight.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx).len(),
            1,
            "Normal comment should still be highlighted (matched by default rule)"
        );
    }
2338
    #[gpui::test]
    async fn test_diagnostics_visible_when_semantic_token_set_to_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake language server advertising a single "function" token type, no modifiers.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One "function" token covering columns 3..7 of line 0 (the `main` identifier).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Wait for the editor's semantic-token update task to finish applying highlights.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Report an ERROR diagnostic over the same range as the semantic token (3..7).
        cx.update_buffer(|buffer, cx| {
            buffer.update_diagnostics(
                LanguageServerId(0),
                DiagnosticSet::new(
                    [DiagnosticEntry {
                        range: PointUtf16::new(0, 3)..PointUtf16::new(0, 7),
                        diagnostic: Diagnostic {
                            severity: lsp::DiagnosticSeverity::ERROR,
                            group_id: 1,
                            message: "unused function".into(),
                            ..Default::default()
                        },
                    }],
                    buffer,
                ),
                cx,
            )
        });

        cx.run_until_parked();
        // Render the first display row with diagnostics enabled but tree-sitter
        // highlighting off, so any highlight style present must come from semantic tokens.
        let chunks = cx.update_editor(|editor, window, cx| {
            editor
                .snapshot(window, cx)
                .display_snapshot
                .chunks(
                    crate::display_map::DisplayRow(0)..crate::display_map::DisplayRow(1),
                    LanguageAwareStyling {
                        tree_sitter: false,
                        diagnostics: true,
                    },
                    crate::HighlightStyles::default(),
                )
                .map(|chunk| {
                    (
                        chunk.text.to_string(),
                        chunk.diagnostic_severity,
                        chunk.highlight_style,
                    )
                })
                .collect::<Vec<_>>()
        });

        // The semantic highlight for `main` is still present...
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // ...and the `main` chunk carries BOTH the diagnostic severity and a
        // highlight style — diagnostics are not clobbered by semantic tokens.
        assert!(
            chunks.iter().any(
                |(text, severity, style): &(
                    String,
                    Option<lsp::DiagnosticSeverity>,
                    Option<gpui::HighlightStyle>
                )| {
                    text == "main"
                        && *severity == Some(lsp::DiagnosticSeverity::ERROR)
                        && style.is_some()
                }
            ),
            "expected 'main' chunk to have both diagnostic and semantic styling: {:?}",
            chunks
        );
    }
2462
2463    fn extract_semantic_highlight_styles(
2464        editor: &Entity<Editor>,
2465        cx: &TestAppContext,
2466    ) -> Vec<HighlightStyle> {
2467        editor.read_with(cx, |editor, cx| {
2468            editor
2469                .display_map
2470                .read(cx)
2471                .semantic_token_highlights
2472                .iter()
2473                .flat_map(|(_, (v, interner))| {
2474                    v.iter().map(|highlights| interner[highlights.style])
2475                })
2476                .collect()
2477        })
2478    }
2479}