//! semantic_tokens.rs — LSP semantic-token highlighting support for the editor.

   1use std::{collections::hash_map, sync::Arc, time::Duration};
   2
   3use collections::{HashMap, HashSet};
   4use futures::future::join_all;
   5use gpui::{
   6    App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
   7};
   8use itertools::Itertools;
   9use language::language_settings::language_settings;
  10use project::{
  11    lsp_store::{
  12        BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
  13        TokenType,
  14    },
  15    project_settings::ProjectSettings,
  16};
  17use settings::{
  18    SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
  19    SemanticTokenRules, Settings as _,
  20};
  21use text::BufferId;
  22use theme::SyntaxTheme;
  23use ui::ActiveTheme as _;
  24
  25use crate::{
  26    Editor,
  27    actions::ToggleSemanticHighlights,
  28    display_map::{HighlightStyleInterner, SemanticTokenHighlight},
  29};
  30
/// Per-editor state for LSP semantic-token highlighting.
pub(super) struct SemanticTokenState {
    // Styling rules from settings, cached here so changes can be detected
    // (see `update_rules`).
    rules: SemanticTokenRules,
    // Whether semantic highlighting is toggled on for this editor.
    enabled: bool,
    // The in-flight debounced fetch; replaced wholesale on every update,
    // which cancels the previous one.
    update_task: Task<()>,
    // Buffer version at which tokens were last fetched per buffer, used to
    // skip redundant requests for unchanged buffers.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
  37
  38impl SemanticTokenState {
  39    pub(super) fn new(cx: &App, enabled: bool) -> Self {
  40        Self {
  41            rules: ProjectSettings::get_global(cx)
  42                .global_lsp_settings
  43                .semantic_token_rules
  44                .clone(),
  45            enabled,
  46            update_task: Task::ready(()),
  47            fetched_for_buffers: HashMap::default(),
  48        }
  49    }
  50
  51    pub(super) fn enabled(&self) -> bool {
  52        self.enabled
  53    }
  54
  55    pub(super) fn toggle_enabled(&mut self) {
  56        self.enabled = !self.enabled;
  57    }
  58
  59    #[cfg(test)]
  60    pub(super) fn take_update_task(&mut self) -> Task<()> {
  61        std::mem::replace(&mut self.update_task, Task::ready(()))
  62    }
  63
  64    pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
  65        self.fetched_for_buffers.remove(buffer_id);
  66    }
  67
  68    pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
  69        if new_rules != self.rules {
  70            self.rules = new_rules;
  71            true
  72        } else {
  73            false
  74        }
  75    }
  76}
  77
impl Editor {
    /// Returns `true` if any buffer in this editor's multi-buffer has a
    /// semantics provider that can serve semantic tokens for it.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        // One supporting buffer is enough: tokens are fetched per buffer and
        // unsupported buffers are simply skipped later.
        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(|buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlighting is currently toggled on.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler for [`ToggleSemanticHighlights`]: flips the flag and
    /// then either refreshes or clears the highlights.
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.update_semantic_tokens(None, None, cx);
    }

    /// Debounced re-fetch of semantic tokens for all visible buffers, plus
    /// `buffer_id` when given, updating the display map's highlights.
    ///
    /// When `for_server` is set (a server-initiated refresh), all previously
    /// fetched results are invalidated and re-queried.
    pub(crate) fn update_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        // Highlighting is only active in full editors with the feature on;
        // otherwise drop cached versions, highlights, and the pending task.
        if !self.mode().is_full() || !self.semantic_token_state.enabled() {
            self.semantic_token_state.fetched_for_buffers.clear();
            self.display_map.update(cx, |display_map, _| {
                display_map.semantic_token_highlights.clear();
            });
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-side refresh request invalidates everything fetched so
        // far; the stale display-map entries are removed after the new fetch
        // completes (inside the spawned task below).
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Candidate buffers: all currently visible excerpts plus the
        // explicitly requested buffer, restricted to registered buffers whose
        // language settings enable semantic tokens.
        let buffers_to_query = self
            .visible_excerpts(true, cx)
            .into_values()
            .map(|(buffer, ..)| buffer)
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && language_settings(
                        editor_buffer.read(cx).language().map(|l| l.name()),
                        editor_buffer.read(cx).file(),
                        cx,
                    )
                    .semantic_tokens
                    .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Drop highlights for buffers that still carry them but are no longer
        // queried AND have semantic tokens disabled in settings.
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .iter()
            .map(|(buffer_id, _)| *buffer_id)
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        // Replacing `update_task` cancels any previously scheduled fetch,
        // which together with the timer below debounces rapid edits.
        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            // Debounce: coalesce bursts of updates into a single fetch.
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            // Start one fetch per buffer, skipping buffers whose contents
            // have not changed since the last successful fetch.
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                let task = sema.semantic_tokens(buffer, for_server, cx);
                                Some(async move { (buffer_id, query_version, task.await) })
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            // Await all per-buffer fetches concurrently.
            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // Apply the invalidation scheduled for a server-initiated
                    // refresh only now, so stale highlights stay visible
                    // until replacements are available.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // `tokens: None` means there is no token data for
                            // this buffer anymore — drop our highlights too.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                    {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Only apply results that are at least as new as what
                        // was already recorded for this buffer.
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        // Convert each server's buffer-relative tokens into
                        // multi-buffer highlights, interning styles to share
                        // identical ones.
                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        cx,
                                    ));
                                }

                                // Highlights must be ordered by start anchor
                                // for lookups in the display map.
                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                display_map.semantic_token_highlights.insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }

    /// Forces a full re-fetch: forgets all fetched versions, then updates.
    pub(super) fn refresh_semantic_token_highlights(&mut self, cx: &mut Context<Self>) {
        self.semantic_token_state.fetched_for_buffers.clear();
        self.update_semantic_tokens(None, None, cx);
    }
}
 321
 322fn buffer_into_editor_highlights<'a, 'b>(
 323    buffer_tokens: &'a [BufferSemanticToken],
 324    stylizer: &'a SemanticTokenStylizer,
 325    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
 326    interner: &'b mut HighlightStyleInterner,
 327    cx: &'a App,
 328) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
 329    multi_buffer_snapshot
 330        .text_anchors_to_visible_anchors(
 331            buffer_tokens
 332                .iter()
 333                .flat_map(|token| [token.range.start, token.range.end]),
 334        )
 335        .into_iter()
 336        .tuples::<(_, _)>()
 337        .zip(buffer_tokens)
 338        .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
 339            let range = multi_buffer_start?..multi_buffer_end?;
 340            let style = convert_token(
 341                stylizer,
 342                cx.theme().syntax(),
 343                token.token_type,
 344                token.token_modifiers,
 345            )?;
 346            let style = interner.intern(style);
 347            Some(SemanticTokenHighlight {
 348                range,
 349                style,
 350                token_type: token.token_type,
 351                token_modifiers: token.token_modifiers,
 352                server_id: stylizer.server_id(),
 353            })
 354        })
 355}
 356
/// Resolves the highlight style for a single semantic token by merging every
/// stylizer rule whose modifier requirements are satisfied by `modifiers`.
///
/// Returns `None` when no rule matches at all, letting the token fall back to
/// ordinary syntax highlighting.
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    let rules = stylizer.rules_for_token(token_type)?;
    // A rule applies when every modifier it demands is present in the
    // token's modifier bitset.
    let matching = rules.iter().filter(|rule| {
        rule.token_modifiers
            .iter()
            .all(|m| stylizer.has_modifier(modifiers, m))
    });

    let mut highlight = HighlightStyle::default();
    let mut empty = true;

    // Later rules override earlier ones, field by field. Per field, the
    // precedence is: explicit rule value > value from the rule's theme style
    // > value accumulated from previous rules.
    for rule in matching {
        empty = false;

        // The first syntax-theme style named by the rule that the theme
        // actually defines, if any.
        let style = rule.style.iter().find_map(|style| theme.get_opt(style));

        // Applies the precedence described above to one field of
        // `highlight`. The `SemanticTokenRule::` prefix in the invocation is
        // purely documentary — only the field identifier is matched.
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        // Underline/strikethrough may inherit the foreground color resolved
        // so far, replace it with a fixed color, or use no color at all.
        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }

    if empty { None } else { Some(highlight) }
}
 450
 451#[cfg(test)]
 452mod tests {
 453    use std::{
 454        ops::{Deref as _, Range},
 455        sync::atomic::{self, AtomicUsize},
 456    };
 457
 458    use futures::StreamExt as _;
 459    use gpui::{
 460        AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, VisualTestContext,
 461    };
 462    use language::{Language, LanguageConfig, LanguageMatcher};
 463    use languages::FakeLspAdapter;
 464    use multi_buffer::{
 465        AnchorRangeExt, ExcerptRange, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset,
 466    };
 467    use project::Project;
 468    use rope::Point;
 469    use serde_json::json;
 470    use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore};
 471    use workspace::{Workspace, WorkspaceHandle as _};
 472
 473    use crate::{
 474        Capability,
 475        editor_tests::{init_test, update_test_language_settings},
 476        test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
 477    };
 478
 479    use super::*;
 480
    // A server that advertises `full` support but no delta capability must
    // receive a full request for every change, even when it returns a
    // result ID.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers in the test settings.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // `delta: None` — the fake server does not support delta requests.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        // Always answer full requests with one "function" token at col 3,
        // length 4 (the `main` identifier).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // A second edit must trigger another *full* request.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Two edits -> two full fetches, zero delta fetches.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 558
    // Even when the server supports deltas, responding with `result_id: None`
    // must force the client to keep issuing full requests.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // `delta: Some(true)` — the server *does* advertise delta support.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Await the debounced update task so highlights are applied.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        // Both edits went through the full request path.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 634
    // Full-then-delta flow: after the initial full response carries a result
    // ID, subsequent edits must be fetched via `semanticTokens/full/delta`.
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Server advertises delta support.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        // Full handler: one "function" token, result ID "a".
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        // Delta handler: must be asked relative to result ID "a"; replies
        // with an empty edit list (tokens unchanged) and a new ID "b".
        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // The second edit goes through the delta path.
        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // The empty delta leaves the original token highlight in place.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
 730
    // Two fake LSP servers are registered for the same TOML language, each with
    // its own single-entry token legend. After an edit triggers a (debounced)
    // semantic-token refresh, highlights from both servers should be merged in
    // the editor, and each server should have received exactly one full request.
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for the TOML language.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test.
        // Each legend maps token_type index 0 to a different token type.
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        // Counters track how many SemanticTokensFull requests each fake server
        // receives, so we can assert on request deduplication at the end.
        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight '3' as a number (index 0 in toml_legend_2)
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        // Seed the fake filesystem with a single two-line TOML file.
        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
        let workspace = window.root(cx).unwrap();

        let mut cx = VisualTestContext::from_window(*window.deref(), cx);
        project
            .update(&mut cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        // Open the TOML file in the workspace and obtain its editor.
        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(&mut cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        editor.update_in(&mut cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait until both fake servers have started.
        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        editor.update_in(&mut cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        // Advance past the refresh debounce, then drive the update task to completion.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // Both servers' tokens should be present: server 1's token at 0..1 and
        // server 2's token at 4..5.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        // Exactly one full-tokens request per server.
        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
 951
    // A multibuffer contains excerpts from a TOML buffer and a Rust buffer.
    // Semantic-token highlights should only cover token ranges that fall within
    // the visible excerpt; expanding the TOML excerpt should re-fetch and reveal
    // the additional tokens on the newly exposed lines.
    #[gpui::test]
    async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for both languages used in the multibuffer.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));
        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // One-entry legends: token_type index 0 is the only type either server emits.
        let toml_legend = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let rust_legend = lsp::SemanticTokensLegend {
            token_types: vec!["constant".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        // Counts SemanticTokensFull requests received by the TOML server.
        let full_counter_toml = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_clone = full_counter_toml.clone();

        let mut toml_server = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_clone = full_counter_toml_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
                                                data: vec![
                                                    0, // delta_line (line 0)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 1)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 2)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());
        // The Rust server advertises semantic tokens but has no request handler;
        // this test only asserts on TOML-server behavior.
        let mut rust_server = language_registry.register_fake_lsp(
            rust_language.name(),
            FakeLspAdapter {
                name: "rust",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: rust_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(rust_language.clone());

        // Seed the fake filesystem with one file per language.
        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\nc = 3\n",
                        "bar.rs": "const c: usize = 3;\n",
                    }
                }),
            )
            .await;

        let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
        let workspace = window.root(cx).unwrap();

        let mut cx = VisualTestContext::from_window(*window.deref(), cx);
        project
            .update(&mut cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        // Open both files; file_project_paths yields the Rust file at index 0
        // and the TOML file at index 1 here.
        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
        let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let (toml_item, rust_item) = workspace.update_in(&mut cx, |workspace, window, cx| {
            (
                workspace.open_path(toml_file, None, true, window, cx),
                workspace.open_path(rust_file, None, true, window, cx),
            )
        });
        let toml_item = toml_item.await.expect("Could not open test file");
        let rust_item = rust_item.await.expect("Could not open test file");

        let (toml_editor, rust_editor) = cx.update(|_, cx| {
            (
                toml_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
                rust_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
            )
        });
        let toml_buffer = cx.read(|cx| {
            toml_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        let rust_buffer = cx.read(|cx| {
            rust_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        // Build a multibuffer with a one-line excerpt from each buffer.
        let multibuffer = cx.new(|cx| {
            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
            multibuffer.push_excerpts(
                toml_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
                cx,
            );
            multibuffer.push_excerpts(
                rust_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
                cx,
            );
            multibuffer
        });

        let editor = workspace.update_in(&mut cx, |workspace, window, cx| {
            let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
            workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
            editor
        });
        editor.update_in(&mut cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait until both fake servers have started.
        let _toml_server = toml_server.next().await.unwrap();
        let _rust_server = rust_server.next().await.unwrap();

        // Initial request.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
        cx.run_until_parked();

        // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
        // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
        );

        // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
        let toml_excerpt_id =
            editor.read_with(&cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]);
        editor.update_in(&mut cx, |editor, _, cx| {
            editor.buffer().update(cx, |buffer, cx| {
                buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx);
            });
        });

        // Wait for semantic tokens to be re-fetched after expansion.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
        // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
        // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(6)..MultiBufferOffset(7),
                MultiBufferOffset(12)..MultiBufferOffset(13),
            ]
        );
    }
1236
    // A multibuffer contains two excerpts of the SAME underlying TOML buffer,
    // which is also open in a regular editor. Edits in both excerpts should be
    // debounced into a single additional semantic-tokens request (2 total),
    // rather than one request per excerpt per view.
    #[gpui::test]
    async fn lsp_semantic_tokens_multibuffer_shared(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for the TOML language.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // Single-entry legend: token_type index 0 means "property".
        let toml_legend = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        // Counts SemanticTokensFull requests received by the fake server.
        let full_counter_toml = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_clone = full_counter_toml.clone();

        let mut toml_server = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_clone = full_counter_toml_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        // Seed the fake filesystem with a single two-line TOML file.
        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
        let workspace = window.root(cx).unwrap();

        let mut cx = VisualTestContext::from_window(*window.deref(), cx);
        project
            .update(&mut cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        // Open the TOML file in a regular editor so its buffer is shared with
        // the multibuffer constructed below.
        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(&mut cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let toml_editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });
        let toml_buffer = cx.read(|cx| {
            toml_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        // Two identical excerpts over the same buffer.
        let multibuffer = cx.new(|cx| {
            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
            multibuffer.push_excerpts(
                toml_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
                cx,
            );
            multibuffer.push_excerpts(
                toml_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
                cx,
            );
            multibuffer
        });

        let editor = workspace.update_in(&mut cx, |_, window, cx| {
            cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx))
        });
        editor.update_in(&mut cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait until the fake server has started.
        let _toml_server = toml_server.next().await.unwrap();

        // Initial request.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);

        // Edit two parts of the multibuffer, which both map to the same buffer.
        //
        // Without debouncing, this grabs semantic tokens 4 times (twice for the
        // toml editor, and twice for the multibuffer).
        editor.update_in(&mut cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
            editor.edit([(MultiBufferOffset(12)..MultiBufferOffset(13), "c")], cx);
        });
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
        );

        // Only one extra request on top of the initial one — the two edits were
        // coalesced by the debounce.
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 2);
    }
1433
1434    fn extract_semantic_highlights(
1435        editor: &Entity<Editor>,
1436        cx: &TestAppContext,
1437    ) -> Vec<Range<MultiBufferOffset>> {
1438        editor.read_with(cx, |editor, cx| {
1439            let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1440            editor
1441                .display_map
1442                .read(cx)
1443                .semantic_token_highlights
1444                .iter()
1445                .flat_map(|(_, (v, _))| v.iter())
1446                .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1447                .collect()
1448        })
1449    }
1450
    // Verifies that editing `semantic_token_rules` in user settings restyles
    // existing semantic-token highlights: after the settings change and a
    // refetch, the "function" token picks up the configured foreground color.
    #[gpui::test]
    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});

        // Turn on full semantic-token support for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertises a single-entry legend ("function") with delta support.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Every full-tokens request returns one "function" token at chars 3..7.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type (function)
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None,
                            },
                        )))
                    }
                },
            );

        // Trigger initial semantic tokens fetch
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Verify initial highlights exist (with no custom color yet)
        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
        assert_eq!(
            initial_ranges,
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Should have initial semantic token highlights"
        );
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Initial color should be None or theme default (not red or blue)
        let initial_color = initial_styles[0].color;

        // Set a custom foreground color for function tokens via settings.json
        let red_color = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        };
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: Vec::from([SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                foreground_color: Some(red_color),
                                ..SemanticTokenRule::default()
                            }]),
                        }),
                        ..GlobalLspSettingsContent::default()
                    });
                });
            });
        });

        // Trigger a refetch by making an edit (which forces semantic tokens update)
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        // Verify the highlights now have the custom red color
        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(
            styles_after_settings_change.len(),
            1,
            "Should still have one highlight"
        );
        assert_eq!(
            styles_after_settings_change[0].color,
            Some(Hsla::from(red_color)),
            "Highlight should have the custom red color from settings.json"
        );
        assert_ne!(
            styles_after_settings_change[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1571
    // Verifies that `experimental_theme_overrides` restyle semantic-token
    // highlights without an LSP refetch: applying, changing, and clearing a
    // "function" syntax-color override each update the displayed style.
    #[gpui::test]
    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};

        init_test(cx, |_| {});

        // Turn on full semantic-token support for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertises a single-entry legend ("function") with delta support.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Every full-tokens request returns one "function" token at chars 3..7.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and capture the baseline highlight color.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Changing experimental_theme_overrides triggers GlobalTheme reload,
        // which fires theme_changed → refresh_semantic_token_highlights.
        let red_color: Hsla = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#ff0000".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        // Advance past any debounce before checking the restyled highlight.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(red_color),
            "Highlight should have red color from theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );

        // Changing the override to a different color also restyles.
        let blue_color: Hsla = Rgba {
            r: 0.0,
            g: 0.0,
            b: 1.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#0000ff".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_second_override.len(), 1);
        assert_eq!(
            styles_after_second_override[0].color,
            Some(blue_color),
            "Highlight should have blue color from updated theme override"
        );

        // Removing overrides reverts to the original theme color.
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = None;
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_clear.len(), 1);
        assert_eq!(
            styles_after_clear[0].color, initial_color,
            "Highlight should revert to initial color after clearing overrides"
        );
    }
1736
    // Verifies that per-theme overrides (`theme_overrides`, keyed by the active
    // theme's name) also restyle existing semantic-token highlights.
    #[gpui::test]
    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};
        use ui::ActiveTheme as _;

        init_test(cx, |_| {});

        // Turn on full semantic-token support for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertises a single-entry legend ("function") with delta support.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Every full-tokens request returns one "function" token at chars 3..7.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and capture the baseline highlight color.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Per-theme overrides (theme_overrides keyed by theme name) also go through
        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
        let green_color: Hsla = Rgba {
            r: 0.0,
            g: 1.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
                        theme_name.clone(),
                        ThemeStyleContent {
                            syntax: IndexMap::from_iter([(
                                "function".to_string(),
                                HighlightStyleContent {
                                    color: Some("#00ff00".to_string()),
                                    background_color: None,
                                    font_style: None,
                                    font_weight: None,
                                },
                            )]),
                            ..ThemeStyleContent::default()
                        },
                    )]);
                });
            });
        });

        // Advance past any debounce before checking the restyled highlight.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(green_color),
            "Highlight should have green color from per-theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1849
    // Verifies that stopping the language server for a buffer removes that
    // buffer's semantic-token highlights from the editor.
    #[gpui::test]
    async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Turn on full semantic-token support for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertises a single-entry legend ("function") with delta support.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Every full-tokens request returns one token at chars 3..7.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before stopping the server"
        );

        // Stop the server for all buffers in the editor's multi-buffer.
        cx.update_editor(|editor, _, cx| {
            let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
            editor.project.as_ref().unwrap().update(cx, |project, cx| {
                project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
            })
        });
        // Advance past any debounce so the cleanup has a chance to run.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after stopping the server"
        );
    }
1927
    // Verifies that flipping the language's `semantic_tokens` setting from
    // Full to Off clears existing semantic-token highlights.
    #[gpui::test]
    async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Turn on full semantic-token support for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertises a single-entry legend ("function") with delta support.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Every full-tokens request returns one token at chars 3..7.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before disabling the setting"
        );

        // Switch the setting to Off; the settings-changed path should clear highlights.
        update_test_language_settings(&mut cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Off),
                    ..LanguageSettingsContent::default()
                },
            );
        });
        // Advance past any debounce so the cleanup has a chance to run.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after disabling the setting"
        );
    }
2008
2009    fn extract_semantic_highlight_styles(
2010        editor: &Entity<Editor>,
2011        cx: &TestAppContext,
2012    ) -> Vec<HighlightStyle> {
2013        editor.read_with(cx, |editor, cx| {
2014            editor
2015                .display_map
2016                .read(cx)
2017                .semantic_token_highlights
2018                .iter()
2019                .flat_map(|(_, (v, interner))| {
2020                    v.iter().map(|highlights| interner[highlights.style])
2021                })
2022                .collect()
2023        })
2024    }
2025}