//! semantic_tokens.rs — editor-side fetching, styling, and caching of LSP semantic tokens.

   1use std::{collections::hash_map, sync::Arc, time::Duration};
   2
   3use collections::{HashMap, HashSet};
   4use futures::future::join_all;
   5use gpui::{
   6    App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
   7};
   8use itertools::Itertools;
   9use language::language_settings::LanguageSettings;
  10use project::{
  11    lsp_store::{
  12        BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
  13        TokenType,
  14    },
  15    project_settings::ProjectSettings,
  16};
  17use settings::{
  18    SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
  19    SemanticTokenRules, Settings as _,
  20};
  21use text::BufferId;
  22use theme::SyntaxTheme;
  23use ui::ActiveTheme as _;
  24
  25use crate::{
  26    Editor,
  27    actions::ToggleSemanticHighlights,
  28    display_map::{HighlightStyleInterner, SemanticTokenHighlight},
  29};
  30
/// Per-editor state for LSP semantic-token highlighting.
pub(super) struct SemanticTokenState {
    // Styling rules cached from the global project settings; refreshed via
    // `update_rules`.
    rules: SemanticTokenRules,
    // Whether semantic highlighting is currently on for this editor.
    enabled: bool,
    // The in-flight (debounced) fetch task; replaced wholesale on each refresh,
    // which cancels the previous one.
    update_task: Task<()>,
    // Buffer version at the time tokens were last fetched, keyed by buffer id.
    // Used to skip re-querying buffers that have not changed since.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
  37
  38impl SemanticTokenState {
  39    pub(super) fn new(cx: &App, enabled: bool) -> Self {
  40        Self {
  41            rules: ProjectSettings::get_global(cx)
  42                .global_lsp_settings
  43                .semantic_token_rules
  44                .clone(),
  45            enabled,
  46            update_task: Task::ready(()),
  47            fetched_for_buffers: HashMap::default(),
  48        }
  49    }
  50
  51    pub(super) fn enabled(&self) -> bool {
  52        self.enabled
  53    }
  54
  55    pub(super) fn toggle_enabled(&mut self) {
  56        self.enabled = !self.enabled;
  57    }
  58
  59    #[cfg(test)]
  60    pub(super) fn take_update_task(&mut self) -> Task<()> {
  61        std::mem::replace(&mut self.update_task, Task::ready(()))
  62    }
  63
  64    pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
  65        self.fetched_for_buffers.remove(buffer_id);
  66    }
  67
  68    pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
  69        if new_rules != self.rules {
  70            self.rules = new_rules;
  71            true
  72        } else {
  73            false
  74        }
  75    }
  76}
  77
impl Editor {
    /// Reports whether any buffer shown by this editor has a semantics
    /// provider that supports semantic tokens.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        // True if at least one constituent buffer is supported.
        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(&mut |buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlighting is currently enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: toggles semantic highlighting, drops all cached
    /// fetch state, and kicks off a fresh refresh (which will clear the
    /// highlights when toggled off).
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.invalidate_semantic_tokens(None);
        self.refresh_semantic_tokens(None, None, cx);
    }

    /// Drops the "already fetched" version record for one buffer, or for all
    /// buffers when `for_buffer` is `None`.
    pub(super) fn invalidate_semantic_tokens(&mut self, for_buffer: Option<BufferId>) {
        match for_buffer {
            Some(for_buffer) => self.semantic_token_state.invalidate_buffer(&for_buffer),
            None => self.semantic_token_state.fetched_for_buffers.clear(),
        }
    }

    /// Re-fetches semantic tokens for the visible buffers (plus `buffer_id`,
    /// if given) and installs the resulting highlights on the display map.
    ///
    /// When `for_server` is set, all previously fetched state is considered
    /// stale and the affected buffers' highlights are invalidated once the
    /// new results arrive. The actual work runs in a debounced spawned task;
    /// assigning `update_task` cancels any previous in-flight refresh.
    pub(super) fn refresh_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        // Disabled (globally or per-editor): clear everything and bail.
        if !self.lsp_data_enabled() || !self.semantic_token_state.enabled() {
            self.invalidate_semantic_tokens(None);
            self.display_map.update(cx, |display_map, _| {
                // Clear in place when we hold the only Arc reference;
                // otherwise swap in a fresh empty map.
                match Arc::get_mut(&mut display_map.semantic_token_highlights) {
                    Some(highlights) => highlights.clear(),
                    None => display_map.semantic_token_highlights = Arc::new(Default::default()),
                };
            });
            // Cancel any in-flight refresh.
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-initiated refresh invalidates every buffer we fetched for;
        // their old highlights are removed after the new results land.
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Candidate buffers: visible + LSP-relevant ones, plus the explicitly
        // requested buffer, filtered down to registered buffers whose language
        // settings enable semantic tokens.
        let buffers_to_query = self
            .visible_buffers(cx)
            .into_iter()
            .filter(|buffer| self.is_lsp_relevant(buffer.read(cx).file(), cx))
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && LanguageSettings::for_buffer(editor_buffer.read(cx), cx)
                        .semantic_tokens
                        .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Buffers that currently carry highlights but are no longer queried
        // and have tokens disabled (or were removed) get cleaned up eagerly.
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .keys()
            .copied()
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        LanguageSettings::for_buffer(&buffer, cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            // Debounce: coalesce bursts of edits/refresh requests.
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            // Skip buffers whose version hasn't advanced past
                            // the one we already fetched tokens for.
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                sema.semantic_tokens(buffer, for_server, cx).map(
                                    |task| async move { (buffer_id, query_version, task.await) },
                                )
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            // Await all per-buffer token requests concurrently.
            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // Apply the deferred invalidation for server-refresh.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                            editor.semantic_token_state.invalidate_buffer(&buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // `None` tokens means the server reported nothing:
                            // drop any stale highlights for that buffer.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                    {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Record the fetched version, but discard results that
                        // are no newer than what we already applied (a
                        // concurrent refresh may have raced ahead).
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        // Language name selects the per-language stylizer rules.
                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                // Merge tokens from every server that returned
                                // results, interning styles to dedupe them.
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        cx,
                                    ));
                                }

                                // Keep highlights ordered by start anchor so the
                                // display map can binary-search/merge them.
                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                Arc::make_mut(&mut display_map.semantic_token_highlights).insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }
}
 325
 326fn buffer_into_editor_highlights<'a, 'b>(
 327    buffer_tokens: &'a [BufferSemanticToken],
 328    stylizer: &'a SemanticTokenStylizer,
 329    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
 330    interner: &'b mut HighlightStyleInterner,
 331    cx: &'a App,
 332) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
 333    multi_buffer_snapshot
 334        .text_anchors_to_visible_anchors(
 335            buffer_tokens
 336                .iter()
 337                .flat_map(|token| [token.range.start, token.range.end]),
 338        )
 339        .into_iter()
 340        .tuples::<(_, _)>()
 341        .zip(buffer_tokens)
 342        .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
 343            let range = multi_buffer_start?..multi_buffer_end?;
 344            let style = convert_token(
 345                stylizer,
 346                cx.theme().syntax(),
 347                token.token_type,
 348                token.token_modifiers,
 349            )?;
 350            let style = interner.intern(style);
 351            Some(SemanticTokenHighlight {
 352                range,
 353                style,
 354                token_type: token.token_type,
 355                token_modifiers: token.token_modifiers,
 356                server_id: stylizer.server_id(),
 357            })
 358        })
 359}
 360
/// Computes the highlight style for a token, layering every matching rule for
/// its type (a rule matches when all of its required modifiers are set in
/// `modifiers`).
///
/// Rules are applied in order; later rules overwrite fields set by earlier
/// ones. For each field, precedence within one rule is: the rule's explicit
/// value, then the theme style named by the rule, then whatever an earlier
/// rule already set. Returns `None` when no rule matches, or when the last
/// matching rule explicitly defines no style (an opt-out).
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    let rules = stylizer.rules_for_token(token_type)?;
    let matching: Vec<_> = rules
        .iter()
        .filter(|rule| {
            rule.token_modifiers
                .iter()
                .all(|m| stylizer.has_modifier(modifiers, m))
        })
        .collect();

    // The last matching rule wins; if it defines no style at all, the user
    // opted this token out of highlighting entirely.
    if let Some(rule) = matching.last() {
        if rule.no_style_defined() {
            return None;
        }
    }

    let mut highlight = HighlightStyle::default();
    let mut empty = true;

    for rule in matching {
        empty = false;

        // First theme style whose name the rule references, used as the
        // fallback source for each field below.
        let style = rule
            .style
            .iter()
            .find_map(|style| theme.style_for_name(style));

        // Overwrites one highlight field from the rule, falling back to the
        // named theme style, then to the value from earlier rules.
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        // NOTE: underline/strikethrough may inherit `highlight.color`, which
        // reflects foreground overwrites applied earlier in this iteration.
        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }

    // No matching rule contributed anything: report "no style".
    if empty { None } else { Some(highlight) }
}
 466
 467#[cfg(test)]
 468mod tests {
 469    use std::{
 470        ops::Range,
 471        sync::atomic::{self, AtomicUsize},
 472    };
 473
 474    use futures::StreamExt as _;
 475    use gpui::{
 476        AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, UpdateGlobal as _,
 477    };
 478    use language::{
 479        Diagnostic, DiagnosticEntry, DiagnosticSet, Language, LanguageAwareStyling, LanguageConfig,
 480        LanguageMatcher,
 481    };
 482    use languages::FakeLspAdapter;
 483    use lsp::LanguageServerId;
 484    use multi_buffer::{
 485        AnchorRangeExt, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset, PathKey,
 486    };
 487    use project::Project;
 488    use rope::{Point, PointUtf16};
 489    use serde_json::json;
 490    use settings::{
 491        GlobalLspSettingsContent, LanguageSettingsContent, SemanticTokenRule, SemanticTokenRules,
 492        SemanticTokens, SettingsStore,
 493    };
 494    use workspace::{MultiWorkspace, WorkspaceHandle as _};
 495
 496    use crate::{
 497        Capability,
 498        editor_tests::{init_test, update_test_language_settings},
 499        test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
 500    };
 501
 502    use super::*;
 503
    #[gpui::test]
    // A server advertising full (non-delta) support must be queried with full
    // requests only, even when it returns a result ID.
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Server supports full requests but NOT deltas (`delta: None`).
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // A second edit should trigger another FULL request, never a delta.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // The single token covers "main" (offset 3..7).
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 581
    #[gpui::test]
    // Even when the server advertises delta support, a `None` result ID in its
    // full response must force the client to keep issuing full requests.
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Server claims delta capability (`delta: Some(true)`).
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Drive the debounced update task to completion before editing again.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Second edit: still a FULL request, since no result ID was stored.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        // The single token covers "main" (offset 3..7).
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 657
    #[gpui::test]
    // With delta capability and a stored result ID, the second fetch must be a
    // delta request carrying the previous result ID.
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Server supports deltas (`delta: Some(true)`).
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        // Full handler returns a result ID ("a") so the client can use deltas.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        // Delta handler verifies the client echoes the previous result ID and
        // returns an empty edit set (token positions unchanged).
        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // The next edit should be fetched via the delta endpoint.
        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // The single token covers "main" (offset 3..7).
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Exactly one full request and one delta request were issued.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
 753
    /// Two fake TOML language servers both advertise full semantic-token
    /// support for the same buffer. After an edit triggers a token fetch, the
    /// editor must merge the highlights produced by BOTH servers, and each
    /// server must have been sent exactly one full-tokens request.
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for the TOML language.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test, each with its own
        // single-entry legend, so token_type index 0 means something different
        // per server: "property" for server 1, "number" for server 2.
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        // One request counter per fake server, asserted at the end of the test.
        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // One token: the single character at line 0,
                                                // column 0 (initially 'a', 'b' after the edit
                                                // below), as type 0 = "property" in legend 1.
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // One token: the single character at line 0,
                                                // column 4 (the '1' in "a = 1"), as type 0 =
                                                // "number" in legend 2.
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        // Open dir/foo.toml (the only file besides .git) in an editor.
        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait for both fake servers to start before editing.
        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        // NOTE(review): 200ms is assumed to exceed the semantic-token update
        // debounce — confirm against the editor's debounce constant.
        cx.executor().advance_clock(Duration::from_millis(200));
        // Steal the in-flight update task and drive it to completion so the
        // highlights are applied before asserting.
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // Both servers' tokens must be present: 0..1 ("property" from server 1)
        // and 4..5 ("number" from server 2).
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        // Each server should have been queried exactly once.
        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
 973
    /// Semantic tokens in a multibuffer that excerpts only PART of a buffer.
    /// The TOML server reports three tokens (on lines 0, 1, 2), but the TOML
    /// excerpt initially covers only line 0, so only the first token may be
    /// highlighted. After expanding the excerpt down by 2 lines, all three
    /// tokens must appear.
    #[gpui::test]
    async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for both languages used in the multibuffer.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));
        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // Single-entry legends: token_type 0 is "property" for TOML and
        // "constant" for Rust.
        let toml_legend = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let rust_legend = lsp::SemanticTokensLegend {
            token_types: vec!["constant".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        // Counts full-token requests made to the TOML server.
        let full_counter_toml = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_clone = full_counter_toml.clone();

        let mut toml_server = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_clone = full_counter_toml_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
                                                data: vec![
                                                    0, // delta_line (line 0)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 1)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 2)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());
        // The Rust server advertises semantic tokens but installs no request
        // handler; only the TOML highlights are asserted in this test.
        let mut rust_server = language_registry.register_fake_lsp(
            rust_language.name(),
            FakeLspAdapter {
                name: "rust",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: rust_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(rust_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\nc = 3\n",
                        "bar.rs": "const c: usize = 3;\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        // NOTE(review): assumes file_project_paths yields bar.rs at [0] and
        // foo.toml at [1] (presumably sorted order) — verify if this flakes.
        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
        let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let (toml_item, rust_item) = workspace.update_in(cx, |workspace, window, cx| {
            (
                workspace.open_path(toml_file, None, true, window, cx),
                workspace.open_path(rust_file, None, true, window, cx),
            )
        });
        let toml_item = toml_item.await.expect("Could not open test file");
        let rust_item = rust_item.await.expect("Could not open test file");

        let (toml_editor, rust_editor) = cx.update(|_, cx| {
            (
                toml_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
                rust_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
            )
        });
        let toml_buffer = cx.read(|cx| {
            toml_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        let rust_buffer = cx.read(|cx| {
            rust_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        // Build a multibuffer whose TOML excerpt covers only line 0; the Rust
        // excerpt likewise covers only the start of its line 0.
        let multibuffer = cx.new(|cx| {
            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
            multibuffer.set_excerpts_for_path(
                PathKey::sorted(0),
                toml_buffer.clone(),
                [Point::new(0, 0)..Point::new(0, 4)],
                0,
                cx,
            );
            multibuffer.set_excerpts_for_path(
                PathKey::sorted(1),
                rust_buffer.clone(),
                [Point::new(0, 0)..Point::new(0, 4)],
                0,
                cx,
            );
            multibuffer
        });

        let editor = workspace.update_in(cx, |workspace, window, cx| {
            let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
            workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
            editor
        });
        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait for both fake servers to start.
        let _toml_server = toml_server.next().await.unwrap();
        let _rust_server = rust_server.next().await.unwrap();

        // Initial request.
        // NOTE(review): 200ms is assumed to exceed the semantic-token update
        // debounce — confirm against the editor's debounce constant.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
        cx.run_until_parked();

        // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
        // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
        );

        // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
        let toml_anchor = editor.read_with(cx, |editor, cx| {
            editor
                .buffer()
                .read(cx)
                .snapshot(cx)
                .anchor_in_excerpt(text::Anchor::min_for_buffer(
                    toml_buffer.read(cx).remote_id(),
                ))
                .unwrap()
        });
        editor.update_in(cx, |editor, _, cx| {
            editor.buffer().update(cx, |buffer, cx| {
                buffer.expand_excerpts([toml_anchor], 2, ExpandExcerptDirection::Down, cx);
            });
        });

        // Wait for semantic tokens to be re-fetched after expansion.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
        // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
        // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(6)..MultiBufferOffset(7),
                MultiBufferOffset(12)..MultiBufferOffset(13),
            ]
        );
    }
1269
1270    #[gpui::test]
1271    async fn lsp_semantic_tokens_singleton_opened_from_multibuffer(cx: &mut TestAppContext) {
1272        init_test(cx, |_| {});
1273
1274        update_test_language_settings(cx, &|language_settings| {
1275            language_settings.languages.0.insert(
1276                "Rust".into(),
1277                LanguageSettingsContent {
1278                    semantic_tokens: Some(SemanticTokens::Full),
1279                    ..LanguageSettingsContent::default()
1280                },
1281            );
1282        });
1283
1284        let rust_language = Arc::new(Language::new(
1285            LanguageConfig {
1286                name: "Rust".into(),
1287                matcher: LanguageMatcher {
1288                    path_suffixes: vec!["rs".into()],
1289                    ..LanguageMatcher::default()
1290                },
1291                ..LanguageConfig::default()
1292            },
1293            None,
1294        ));
1295
1296        let rust_legend = lsp::SemanticTokensLegend {
1297            token_types: vec!["function".into()],
1298            token_modifiers: Vec::new(),
1299        };
1300
1301        let app_state = cx.update(workspace::AppState::test);
1302        cx.update(|cx| {
1303            assets::Assets.load_test_fonts(cx);
1304            crate::init(cx);
1305            workspace::init(app_state.clone(), cx);
1306        });
1307
1308        let project = Project::test(app_state.fs.clone(), [], cx).await;
1309        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1310
1311        let mut rust_server = language_registry.register_fake_lsp(
1312            rust_language.name(),
1313            FakeLspAdapter {
1314                name: "rust",
1315                capabilities: lsp::ServerCapabilities {
1316                    semantic_tokens_provider: Some(
1317                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1318                            lsp::SemanticTokensOptions {
1319                                legend: rust_legend,
1320                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1321                                ..lsp::SemanticTokensOptions::default()
1322                            },
1323                        ),
1324                    ),
1325                    ..lsp::ServerCapabilities::default()
1326                },
1327                initializer: Some(Box::new(move |fake_server| {
1328                    fake_server
1329                        .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1330                            move |_, _| async move {
1331                                Ok(Some(lsp::SemanticTokensResult::Tokens(
1332                                    lsp::SemanticTokens {
1333                                        data: vec![0, 3, 4, 0, 0],
1334                                        result_id: None,
1335                                    },
1336                                )))
1337                            },
1338                        );
1339                })),
1340                ..FakeLspAdapter::default()
1341            },
1342        );
1343        language_registry.add(rust_language.clone());
1344
1345        // foo.rs must be long enough that autoscroll triggers an actual scroll
1346        // position change when opening from the multibuffer with cursor near
1347        // the end. This reproduces the race: set_visible_line_count spawns a
1348        // task, then autoscroll fires ScrollPositionChanged whose handler
1349        // replaces post_scroll_update with a debounced task that skips
1350        // update_lsp_data for singletons.
1351        let mut foo_content = String::from("fn test() {}\n");
1352        for i in 0..100 {
1353            foo_content.push_str(&format!("fn func_{i}() {{}}\n"));
1354        }
1355
1356        app_state
1357            .fs
1358            .as_fake()
1359            .insert_tree(
1360                EditorLspTestContext::root_path(),
1361                json!({
1362                    ".git": {},
1363                    "bar.rs": "fn main() {}\n",
1364                    "foo.rs": foo_content,
1365                }),
1366            )
1367            .await;
1368
1369        let (multi_workspace, cx) =
1370            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
1371        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
1372        project
1373            .update(cx, |project, cx| {
1374                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
1375            })
1376            .await
1377            .unwrap();
1378        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
1379            .await;
1380
1381        // Open bar.rs as an editor to start the LSP server.
1382        let bar_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
1383        let bar_item = workspace
1384            .update_in(cx, |workspace, window, cx| {
1385                workspace.open_path(bar_file, None, true, window, cx)
1386            })
1387            .await
1388            .expect("Could not open bar.rs");
1389        let bar_editor = cx.update(|_, cx| {
1390            bar_item
1391                .act_as::<Editor>(cx)
1392                .expect("Opened test file wasn't an editor")
1393        });
1394        let bar_buffer = cx.read(|cx| {
1395            bar_editor
1396                .read(cx)
1397                .buffer()
1398                .read(cx)
1399                .as_singleton()
1400                .unwrap()
1401        });
1402
1403        let _rust_server = rust_server.next().await.unwrap();
1404
1405        cx.executor().advance_clock(Duration::from_millis(200));
1406        let task = bar_editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1407        cx.run_until_parked();
1408        task.await;
1409        cx.run_until_parked();
1410
1411        assert!(
1412            !extract_semantic_highlights(&bar_editor, &cx).is_empty(),
1413            "bar.rs should have semantic tokens after initial open"
1414        );
1415
1416        // Get foo.rs buffer directly from the project. No editor has ever
1417        // fetched semantic tokens for this buffer.
1418        let foo_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
1419        let foo_buffer = project
1420            .update(cx, |project, cx| project.open_buffer(foo_file, cx))
1421            .await
1422            .expect("Could not open foo.rs buffer");
1423
1424        // Build a multibuffer with both files. The foo.rs excerpt covers a
1425        // range near the end of the file so that opening the singleton will
1426        // autoscroll to a position that requires changing scroll_position.
1427        let multibuffer = cx.new(|cx| {
1428            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
1429            multibuffer.set_excerpts_for_path(
1430                PathKey::sorted(0),
1431                bar_buffer.clone(),
1432                [Point::new(0, 0)..Point::new(0, 12)],
1433                0,
1434                cx,
1435            );
1436            multibuffer.set_excerpts_for_path(
1437                PathKey::sorted(1),
1438                foo_buffer.clone(),
1439                [Point::new(95, 0)..Point::new(100, 0)],
1440                0,
1441                cx,
1442            );
1443            multibuffer
1444        });
1445
1446        let mb_editor = workspace.update_in(cx, |workspace, window, cx| {
1447            let editor =
1448                cx.new(|cx| build_editor_with_project(project.clone(), multibuffer, window, cx));
1449            workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
1450            editor
1451        });
1452        mb_editor.update_in(cx, |editor, window, cx| {
1453            let nav_history = workspace
1454                .read(cx)
1455                .active_pane()
1456                .read(cx)
1457                .nav_history_for_item(&cx.entity());
1458            editor.set_nav_history(Some(nav_history));
1459            window.focus(&editor.focus_handle(cx), cx)
1460        });
1461
1462        // Close bar.rs tab so only the multibuffer remains.
1463        workspace
1464            .update_in(cx, |workspace, window, cx| {
1465                let pane = workspace.active_pane().clone();
1466                pane.update(cx, |pane, cx| {
1467                    pane.close_item_by_id(
1468                        bar_editor.entity_id(),
1469                        workspace::SaveIntent::Skip,
1470                        window,
1471                        cx,
1472                    )
1473                })
1474            })
1475            .await
1476            .ok();
1477
1478        cx.run_until_parked();
1479
1480        // Position cursor in the foo.rs excerpt (near line 95+).
1481        mb_editor.update_in(cx, |editor, window, cx| {
1482            let snapshot = editor.display_snapshot(cx);
1483            let end = snapshot.buffer_snapshot().len();
1484            editor.change_selections(None.into(), window, cx, |s| {
1485                s.select_ranges([end..end]);
1486            });
1487        });
1488
1489        // Open the singleton from the multibuffer. open_buffers_in_workspace
1490        // creates the editor and calls change_selections with autoscroll.
1491        // During render, set_visible_line_count fires first (spawning a task),
1492        // then autoscroll_vertically scrolls to line ~95 which emits
1493        // ScrollPositionChanged, whose handler replaces post_scroll_update.
1494        mb_editor.update_in(cx, |editor, window, cx| {
1495            editor.open_excerpts(&crate::actions::OpenExcerpts, window, cx);
1496        });
1497
1498        cx.run_until_parked();
1499        cx.executor().advance_clock(Duration::from_millis(200));
1500        cx.run_until_parked();
1501
1502        let active_editor = workspace.read_with(cx, |workspace, cx| {
1503            workspace
1504                .active_item(cx)
1505                .and_then(|item| item.act_as::<Editor>(cx))
1506                .expect("Active item should be an editor")
1507        });
1508
1509        assert!(
1510            active_editor.read_with(cx, |editor, cx| editor.buffer().read(cx).is_singleton()),
1511            "Active editor should be a singleton buffer"
1512        );
1513
1514        // Wait for semantic tokens on the singleton.
1515        cx.executor().advance_clock(Duration::from_millis(200));
1516        let task = active_editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
1517        task.await;
1518        cx.run_until_parked();
1519
1520        let highlights = extract_semantic_highlights(&active_editor, &cx);
1521        assert!(
1522            !highlights.is_empty(),
1523            "Singleton editor opened from multibuffer should have semantic tokens"
1524        );
1525    }
1526
1527    fn extract_semantic_highlights(
1528        editor: &Entity<Editor>,
1529        cx: &TestAppContext,
1530    ) -> Vec<Range<MultiBufferOffset>> {
1531        editor.read_with(cx, |editor, cx| {
1532            let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1533            editor
1534                .display_map
1535                .read(cx)
1536                .semantic_token_highlights
1537                .iter()
1538                .flat_map(|(_, (v, _))| v.iter())
1539                .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1540                .collect()
1541        })
1542    }
1543
    /// Editing `global_lsp_settings.semantic_token_rules` in user settings
    /// must restyle semantic-token highlights after the next token refetch:
    /// a custom foreground color for "function" tokens replaces the initial
    /// (theme-derived) color.
    #[gpui::test]
    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake language server advertising a single "function" token type,
        // with delta-capable full-document token requests.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Every full-tokens request returns one token covering cols 3..7 of
        // line 0 (the `main` identifier in the buffers set below).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type (function)
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None,
                            },
                        )))
                    }
                },
            );

        // Trigger initial semantic tokens fetch
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Verify initial highlights exist (with no custom color yet)
        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
        assert_eq!(
            initial_ranges,
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Should have initial semantic token highlights"
        );
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Initial color should be None or theme default (not red or blue)
        let initial_color = initial_styles[0].color;

        // Set a custom foreground color for function tokens via settings.json
        let red_color = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        };
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: Vec::from([SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                foreground_color: Some(red_color),
                                ..SemanticTokenRule::default()
                            }]),
                        }),
                        ..GlobalLspSettingsContent::default()
                    });
                });
            });
        });

        // Trigger a refetch by making an edit (which forces semantic tokens update)
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        // Verify the highlights now have the custom red color
        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(
            styles_after_settings_change.len(),
            1,
            "Should still have one highlight"
        );
        assert_eq!(
            styles_after_settings_change[0].color,
            Some(Hsla::from(red_color)),
            "Highlight should have the custom red color from settings.json"
        );
        assert_ne!(
            styles_after_settings_change[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1664
    /// `experimental_theme_overrides` changes must restyle semantic tokens
    /// without any re-fetch from the server: setting a syntax override for
    /// "function" recolors the highlight, updating it recolors again, and
    /// clearing it reverts to the initial theme color.
    #[gpui::test]
    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme_settings::{HighlightStyleContent, ThemeStyleContent};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server with a single "function" token type, delta-capable.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Always respond with one "function" token at line 0, cols 3..7.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Capture the pre-override style so reverts can be checked later.
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Changing experimental_theme_overrides triggers GlobalTheme reload,
        // which fires theme_changed → refresh_semantic_token_highlights.
        let red_color: Hsla = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#ff0000".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        // Give the debounced refresh time to run.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(red_color),
            "Highlight should have red color from theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );

        // Changing the override to a different color also restyles.
        let blue_color: Hsla = Rgba {
            r: 0.0,
            g: 0.0,
            b: 1.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#0000ff".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_second_override.len(), 1);
        assert_eq!(
            styles_after_second_override[0].color,
            Some(blue_color),
            "Highlight should have blue color from updated theme override"
        );

        // Removing overrides reverts to the original theme color.
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = None;
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_clear.len(), 1);
        assert_eq!(
            styles_after_clear[0].color, initial_color,
            "Highlight should revert to initial color after clearing overrides"
        );
    }
1829
    /// Per-theme overrides (`theme_overrides` keyed by the active theme's
    /// name) must also restyle semantic tokens, exactly like the global
    /// experimental overrides in the test above.
    #[gpui::test]
    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme_settings::{HighlightStyleContent, ThemeStyleContent};
        use ui::ActiveTheme as _;

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server with a single "function" token type, delta-capable.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Always respond with one "function" token at line 0, cols 3..7.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Capture the pre-override style for the change assertion below.
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Per-theme overrides (theme_overrides keyed by theme name) also go through
        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
        let green_color: Hsla = Rgba {
            r: 0.0,
            g: 1.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
                        theme_name.clone(),
                        ThemeStyleContent {
                            syntax: IndexMap::from_iter([(
                                "function".to_string(),
                                HighlightStyleContent {
                                    color: Some("#00ff00".to_string()),
                                    background_color: None,
                                    font_style: None,
                                    font_weight: None,
                                },
                            )]),
                            ..ThemeStyleContent::default()
                        },
                    )]);
                });
            });
        });

        // Give the debounced refresh time to run.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(green_color),
            "Highlight should have green color from per-theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1942
1943    #[gpui::test]
1944    async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
1945        init_test(cx, |_| {});
1946
1947        update_test_language_settings(cx, &|language_settings| {
1948            language_settings.languages.0.insert(
1949                "Rust".into(),
1950                LanguageSettingsContent {
1951                    semantic_tokens: Some(SemanticTokens::Full),
1952                    ..LanguageSettingsContent::default()
1953                },
1954            );
1955        });
1956
1957        let mut cx = EditorLspTestContext::new_rust(
1958            lsp::ServerCapabilities {
1959                semantic_tokens_provider: Some(
1960                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
1961                        lsp::SemanticTokensOptions {
1962                            legend: lsp::SemanticTokensLegend {
1963                                token_types: vec!["function".into()],
1964                                token_modifiers: Vec::new(),
1965                            },
1966                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
1967                            ..lsp::SemanticTokensOptions::default()
1968                        },
1969                    ),
1970                ),
1971                ..lsp::ServerCapabilities::default()
1972            },
1973            cx,
1974        )
1975        .await;
1976
1977        let mut full_request = cx
1978            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
1979                move |_, _, _| async move {
1980                    Ok(Some(lsp::SemanticTokensResult::Tokens(
1981                        lsp::SemanticTokens {
1982                            data: vec![
1983                                0, // delta_line
1984                                3, // delta_start
1985                                4, // length
1986                                0, // token_type
1987                                0, // token_modifiers_bitset
1988                            ],
1989                            result_id: None,
1990                        },
1991                    )))
1992                },
1993            );
1994
1995        cx.set_state("ˇfn main() {}");
1996        assert!(full_request.next().await.is_some());
1997        cx.run_until_parked();
1998
1999        assert_eq!(
2000            extract_semantic_highlights(&cx.editor, &cx),
2001            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
2002            "Semantic tokens should be present before stopping the server"
2003        );
2004
2005        cx.update_editor(|editor, _, cx| {
2006            let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
2007            editor.project.as_ref().unwrap().update(cx, |project, cx| {
2008                project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
2009            })
2010        });
2011        cx.executor().advance_clock(Duration::from_millis(200));
2012        cx.run_until_parked();
2013
2014        assert_eq!(
2015            extract_semantic_highlights(&cx.editor, &cx),
2016            Vec::new(),
2017            "Semantic tokens should be cleared after stopping the server"
2018        );
2019    }
2020
2021    #[gpui::test]
2022    async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
2023        init_test(cx, |_| {});
2024
2025        update_test_language_settings(cx, &|language_settings| {
2026            language_settings.languages.0.insert(
2027                "Rust".into(),
2028                LanguageSettingsContent {
2029                    semantic_tokens: Some(SemanticTokens::Full),
2030                    ..LanguageSettingsContent::default()
2031                },
2032            );
2033        });
2034
2035        let mut cx = EditorLspTestContext::new_rust(
2036            lsp::ServerCapabilities {
2037                semantic_tokens_provider: Some(
2038                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
2039                        lsp::SemanticTokensOptions {
2040                            legend: lsp::SemanticTokensLegend {
2041                                token_types: vec!["function".into()],
2042                                token_modifiers: Vec::new(),
2043                            },
2044                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
2045                            ..lsp::SemanticTokensOptions::default()
2046                        },
2047                    ),
2048                ),
2049                ..lsp::ServerCapabilities::default()
2050            },
2051            cx,
2052        )
2053        .await;
2054
2055        let mut full_request = cx
2056            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
2057                move |_, _, _| async move {
2058                    Ok(Some(lsp::SemanticTokensResult::Tokens(
2059                        lsp::SemanticTokens {
2060                            data: vec![
2061                                0, // delta_line
2062                                3, // delta_start
2063                                4, // length
2064                                0, // token_type
2065                                0, // token_modifiers_bitset
2066                            ],
2067                            result_id: None,
2068                        },
2069                    )))
2070                },
2071            );
2072
2073        cx.set_state("ˇfn main() {}");
2074        assert!(full_request.next().await.is_some());
2075        cx.run_until_parked();
2076
2077        assert_eq!(
2078            extract_semantic_highlights(&cx.editor, &cx),
2079            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
2080            "Semantic tokens should be present before disabling the setting"
2081        );
2082
2083        update_test_language_settings(&mut cx, &|language_settings| {
2084            language_settings.languages.0.insert(
2085                "Rust".into(),
2086                LanguageSettingsContent {
2087                    semantic_tokens: Some(SemanticTokens::Off),
2088                    ..LanguageSettingsContent::default()
2089                },
2090            );
2091        });
2092        cx.executor().advance_clock(Duration::from_millis(200));
2093        cx.run_until_parked();
2094
2095        assert_eq!(
2096            extract_semantic_highlights(&cx.editor, &cx),
2097            Vec::new(),
2098            "Semantic tokens should be cleared after disabling the setting"
2099        );
2100    }
2101
    /// A semantic-token rule that names a token type but sets no style at all
    /// acts as a "disable" rule: matching tokens produce no highlight.
    #[gpui::test]
    async fn test_semantic_token_disabling_with_empty_rule(cx: &mut TestAppContext) {
        init_test(cx, |_| {});
        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|s| {
            s.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..Default::default()
                },
            );
        });

        // Fake server with one "function" token type, delta-capable.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: vec![],
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..Default::default()
                        },
                    ),
                ),
                ..Default::default()
            },
            cx,
        )
        .await;

        // One "function" token at line 0, cols 3..7 (delta_line, delta_start,
        // length, token_type, token_modifiers_bitset).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![0, 3, 4, 0, 0],
                            result_id: None,
                        },
                    )))
                },
            );

        // Verify it highlights by default
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();
        assert_eq!(extract_semantic_highlights(&cx.editor, &cx).len(), 1);

        // Apply EMPTY rule to disable it
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            // token_type only — every style field left unset.
                            rules: vec![SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                ..Default::default()
                            }],
                        }),
                        ..Default::default()
                    });
                });
            });
        });

        // Edit to force a token refetch under the new rules.
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        assert!(
            extract_semantic_highlights(&cx.editor, &cx).is_empty(),
            "Highlighting should be disabled by empty style setting"
        );
    }
2179
    /// A broad empty rule (token type only, no modifiers) must also disable
    /// more specific tokens of that type — here a "comment" rule suppresses
    /// a "comment.documentation" token.
    #[gpui::test]
    async fn test_semantic_token_broad_rule_disables_specific_token(cx: &mut TestAppContext) {
        init_test(cx, |_| {});
        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, &|s| {
            s.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..Default::default()
                },
            );
        });

        // Fake server legend: one token type ("comment") and one modifier
        // ("documentation").
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["comment".into()],
                                token_modifiers: vec!["documentation".into()],
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..Default::default()
                        },
                    ),
                ),
                ..Default::default()
            },
            cx,
        )
        .await;

        // One 5-char token at line 0, col 0 with the "documentation"
        // modifier bit set.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![0, 0, 5, 0, 1], // comment [documentation]
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇ/// d\n");
        full_request.next().await;
        cx.run_until_parked();
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx).len(),
            1,
            "Documentation comment should be highlighted"
        );

        // Apply a BROAD empty rule for "comment" (no modifiers)
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: vec![SemanticTokenRule {
                                token_type: Some("comment".to_string()),
                                ..Default::default()
                            }],
                        }),
                        ..Default::default()
                    });
                });
            });
        });

        // Reset buffer contents to force a token refetch under the new rule.
        cx.set_state("ˇ/// d\n");
        full_request.next().await;
        cx.run_until_parked();

        assert!(
            extract_semantic_highlights(&cx.editor, &cx).is_empty(),
            "Broad empty rule should disable specific documentation comment"
        );
    }
2260
2261    #[gpui::test]
2262    async fn test_semantic_token_specific_rule_does_not_disable_broad_token(
2263        cx: &mut TestAppContext,
2264    ) {
2265        use gpui::UpdateGlobal as _;
2266        use settings::{GlobalLspSettingsContent, SemanticTokenRule};
2267
2268        init_test(cx, |_| {});
2269        update_test_language_settings(cx, &|s| {
2270            s.languages.0.insert(
2271                "Rust".into(),
2272                LanguageSettingsContent {
2273                    semantic_tokens: Some(SemanticTokens::Full),
2274                    ..Default::default()
2275                },
2276            );
2277        });
2278
2279        let mut cx = EditorLspTestContext::new_rust(
2280            lsp::ServerCapabilities {
2281                semantic_tokens_provider: Some(
2282                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
2283                        lsp::SemanticTokensOptions {
2284                            legend: lsp::SemanticTokensLegend {
2285                                token_types: vec!["comment".into()],
2286                                token_modifiers: vec!["documentation".into()],
2287                            },
2288                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
2289                            ..Default::default()
2290                        },
2291                    ),
2292                ),
2293                ..Default::default()
2294            },
2295            cx,
2296        )
2297        .await;
2298
2299        let mut full_request = cx
2300            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
2301                move |_, _, _| async move {
2302                    Ok(Some(lsp::SemanticTokensResult::Tokens(
2303                        lsp::SemanticTokens {
2304                            data: vec![
2305                                0, 0, 5, 0, 1, // comment [documentation]
2306                                1, 0, 5, 0, 0, // normal comment
2307                            ],
2308                            result_id: None,
2309                        },
2310                    )))
2311                },
2312            );
2313
2314        cx.set_state("ˇ/// d\n// n\n");
2315        full_request.next().await;
2316        cx.run_until_parked();
2317        assert_eq!(
2318            extract_semantic_highlights(&cx.editor, &cx).len(),
2319            2,
2320            "Both documentation and normal comments should be highlighted initially"
2321        );
2322
2323        // Apply a SPECIFIC empty rule for documentation only
2324        cx.update(|_, cx| {
2325            SettingsStore::update_global(cx, |store, cx| {
2326                store.update_user_settings(cx, |settings| {
2327                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
2328                        semantic_token_rules: Some(SemanticTokenRules {
2329                            rules: vec![SemanticTokenRule {
2330                                token_type: Some("comment".to_string()),
2331                                token_modifiers: vec!["documentation".to_string()],
2332                                ..Default::default()
2333                            }],
2334                        }),
2335                        ..Default::default()
2336                    });
2337                });
2338            });
2339        });
2340
2341        cx.set_state("ˇ/// d\n// n\n");
2342        full_request.next().await;
2343        cx.run_until_parked();
2344
2345        assert_eq!(
2346            extract_semantic_highlights(&cx.editor, &cx).len(),
2347            1,
2348            "Normal comment should still be highlighted (matched by default rule)"
2349        );
2350    }
2351
    // Semantic-token highlighting and LSP diagnostics must compose: a display chunk
    // covered by both should report the diagnostic severity AND still carry a
    // (semantic) highlight style, rather than one feature clobbering the other.
    #[gpui::test]
    async fn test_diagnostics_visible_when_semantic_token_set_to_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token support for Rust buffers.
        update_test_language_settings(cx, &|language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server legend: a single token type, "function"; no modifiers.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // One 4-char "function" token at line 0, column 3 — i.e. `main` in
        // `fn main() {}` below.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Drain the editor's pending semantic-token update task and await it, so the
        // fetched tokens are applied before we inspect the display map.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Inject an ERROR diagnostic over the exact same range as the semantic token
        // (columns 3..7, covering `main`).
        cx.update_buffer(|buffer, cx| {
            buffer.update_diagnostics(
                LanguageServerId(0),
                DiagnosticSet::new(
                    [DiagnosticEntry {
                        range: PointUtf16::new(0, 3)..PointUtf16::new(0, 7),
                        diagnostic: Diagnostic {
                            severity: lsp::DiagnosticSeverity::ERROR,
                            group_id: 1,
                            message: "unused function".into(),
                            ..Default::default()
                        },
                    }],
                    buffer,
                ),
                cx,
            )
        });

        cx.run_until_parked();
        // Read the first display row's chunks with tree-sitter styling off and
        // diagnostics on, capturing (text, severity, highlight style) per chunk.
        let chunks = cx.update_editor(|editor, window, cx| {
            editor
                .snapshot(window, cx)
                .display_snapshot
                .chunks(
                    crate::display_map::DisplayRow(0)..crate::display_map::DisplayRow(1),
                    LanguageAwareStyling {
                        tree_sitter: false,
                        diagnostics: true,
                    },
                    crate::HighlightStyles::default(),
                )
                .map(|chunk| {
                    (
                        chunk.text.to_string(),
                        chunk.diagnostic_severity,
                        chunk.highlight_style,
                    )
                })
                .collect::<Vec<_>>()
        });

        // The semantic highlight itself must still cover offsets 3..7.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // The `main` chunk must carry BOTH the ERROR severity and a highlight style.
        assert!(
            chunks.iter().any(
                |(text, severity, style): &(
                    String,
                    Option<lsp::DiagnosticSeverity>,
                    Option<gpui::HighlightStyle>
                )| {
                    text == "main"
                        && *severity == Some(lsp::DiagnosticSeverity::ERROR)
                        && style.is_some()
                }
            ),
            "expected 'main' chunk to have both diagnostic and semantic styling: {:?}",
            chunks
        );
    }
2475
2476    fn extract_semantic_highlight_styles(
2477        editor: &Entity<Editor>,
2478        cx: &TestAppContext,
2479    ) -> Vec<HighlightStyle> {
2480        editor.read_with(cx, |editor, cx| {
2481            editor
2482                .display_map
2483                .read(cx)
2484                .semantic_token_highlights
2485                .iter()
2486                .flat_map(|(_, (v, interner))| {
2487                    v.iter().map(|highlights| interner[highlights.style])
2488                })
2489                .collect()
2490        })
2491    }
2492}