semantic_tokens.rs

   1use std::{collections::hash_map, sync::Arc, time::Duration};
   2
   3use collections::{HashMap, HashSet};
   4use futures::future::join_all;
   5use gpui::{
   6    App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
   7};
   8use language::language_settings::language_settings;
   9use project::{
  10    lsp_store::{
  11        BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
  12        TokenType,
  13    },
  14    project_settings::ProjectSettings,
  15};
  16use settings::{
  17    SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
  18    SemanticTokenRules, Settings as _,
  19};
  20use text::BufferId;
  21use theme::SyntaxTheme;
  22use ui::ActiveTheme as _;
  23
  24use crate::{
  25    Editor,
  26    actions::ToggleSemanticHighlights,
  27    display_map::{HighlightStyleInterner, SemanticTokenHighlight},
  28};
  29
pub(super) struct SemanticTokenState {
    /// Styling rules used when converting LSP semantic tokens into highlight
    /// styles; sourced from the project's global LSP settings (see `new`).
    rules: SemanticTokenRules,
    /// Whether semantic token highlighting is currently active for this editor.
    enabled: bool,
    /// Debounced background task that fetches tokens and applies them to the
    /// display map; replaced wholesale on each update request.
    update_task: Task<()>,
    /// For each buffer, the buffer version its tokens were last fetched at.
    /// Used to skip refetching when a buffer has not changed since.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
  36
  37impl SemanticTokenState {
  38    pub(super) fn new(cx: &App, enabled: bool) -> Self {
  39        Self {
  40            rules: ProjectSettings::get_global(cx)
  41                .global_lsp_settings
  42                .semantic_token_rules
  43                .clone(),
  44            enabled,
  45            update_task: Task::ready(()),
  46            fetched_for_buffers: HashMap::default(),
  47        }
  48    }
  49
  50    pub(super) fn enabled(&self) -> bool {
  51        self.enabled
  52    }
  53
  54    pub(super) fn toggle_enabled(&mut self) {
  55        self.enabled = !self.enabled;
  56    }
  57
  58    #[cfg(test)]
  59    pub(super) fn take_update_task(&mut self) -> Task<()> {
  60        std::mem::replace(&mut self.update_task, Task::ready(()))
  61    }
  62
  63    pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
  64        self.fetched_for_buffers.remove(buffer_id);
  65    }
  66
  67    pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
  68        if new_rules != self.rules {
  69            self.rules = new_rules;
  70            true
  71        } else {
  72            false
  73        }
  74    }
  75}
  76
impl Editor {
    /// Returns `true` when any buffer in this editor's multi-buffer has a
    /// semantics provider that supports semantic tokens.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(|buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlights are currently enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: flips the enabled flag, then re-runs the update, which
    /// either fetches fresh tokens or clears all existing highlights.
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.update_semantic_tokens(None, None, cx);
    }

    /// Fetches semantic tokens for the buffers of the visible excerpts (plus
    /// `buffer_id`, if given) and applies them as display-map highlights.
    ///
    /// * `buffer_id` — an extra buffer to query in addition to the visible
    ///   excerpts' buffers.
    /// * `for_server` — when set, this is a server-initiated refresh: all
    ///   previously fetched buffer versions are dropped and their highlights
    ///   are invalidated once the new results arrive.
    pub(crate) fn update_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        // Not a full editor, or highlighting disabled: drop all cached state
        // and highlights instead of querying anything.
        if !self.mode().is_full() || !self.semantic_token_state.enabled() {
            self.semantic_token_state.fetched_for_buffers.clear();
            self.display_map.update(cx, |display_map, _| {
                display_map.semantic_token_highlights.clear();
            });
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-initiated refresh discards every fetched version; remember
        // those buffers so their stale highlights can be cleared once the new
        // results come back (inside the spawned task below).
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Candidate buffers: those backing the visible excerpts plus the
        // explicitly requested one, restricted to buffers registered with the
        // editor whose language settings enable semantic tokens.
        let buffers_to_query = self
            .visible_excerpts(true, cx)
            .into_values()
            .map(|(buffer, ..)| buffer)
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && language_settings(
                        editor_buffer.read(cx).language().map(|l| l.name()),
                        editor_buffer.read(cx).file(),
                        cx,
                    )
                    .semantic_tokens
                    .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Clear highlights for buffers that currently have them but are no
        // longer queried AND whose settings no longer enable semantic tokens
        // (or which are gone from the multi-buffer entirely).
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .iter()
            .map(|(buffer_id, _)| *buffer_id)
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        // Replacing `update_task` cancels any previously spawned update, so
        // bursts of calls (typing, scrolling) are debounced by the timer.
        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            // Start one token request per buffer, skipping buffers whose
            // version has not advanced since the last successful fetch.
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            let known_version =
                                editor.semantic_token_state.fetched_for_buffers.get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                None
                            } else {
                                let task = sema.semantic_tokens(buffer, for_server, cx);
                                Some(async move { (buffer_id, query_version, task.await) })
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor.update(cx, |editor, cx| {
                // Now that fresh results are in hand, drop the highlights that
                // a server-initiated refresh marked as stale earlier.
                editor.display_map.update(cx, |display_map, _| {
                    for buffer_id in invalidate_semantic_highlights_for_buffers {
                        display_map.invalidate_semantic_highlights(buffer_id);
                    }
                });


                if all_semantic_tokens.is_empty() {
                    return;
                }
                let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
                let all_excerpts = editor.buffer().read(cx).excerpt_ids();

                for (buffer_id, query_version, tokens) in all_semantic_tokens {
                    let tokens = match tokens {
                        Ok(BufferSemanticTokens { tokens: Some(tokens) }) => {
                            tokens
                        },
                        // `tokens: None` means no tokens for this buffer:
                        // clear whatever highlights it had.
                        Ok(BufferSemanticTokens { tokens: None }) => {
                            editor.display_map.update(cx, |display_map, _| {
                                display_map.invalidate_semantic_highlights(buffer_id);
                            });
                            continue;
                        },
                        // On error, keep the existing highlights and move on.
                        Err(e) => {
                            log::error!("Failed to fetch semantic tokens for buffer {buffer_id:?}: {e:#}");
                            continue;
                        },
                    };

                    // Guard against races between overlapping updates: only
                    // apply results that are newer than what is recorded.
                    match editor.semantic_token_state.fetched_for_buffers.entry(buffer_id) {
                        hash_map::Entry::Occupied(mut o) => {
                            if query_version.changed_since(o.get()) {
                                o.insert(query_version);
                            } else {
                                continue;
                            }
                        },
                        hash_map::Entry::Vacant(v) => {
                            v.insert(query_version);
                        },
                    }

                    let language_name = editor
                        .buffer()
                        .read(cx)
                        .buffer(buffer_id)
                        .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                    // Convert each server's tokens into styled multi-buffer
                    // ranges, interning styles so equal styles share storage.
                    editor.display_map.update(cx, |display_map, cx| {
                        project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                            let mut token_highlights = Vec::new();
                            let mut interner = HighlightStyleInterner::default();
                            for (server_id, server_tokens) in tokens {
                                let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                    server_id,
                                    language_name.as_ref(),
                                    cx,
                                )
                                else {
                                    continue;
                                };
                                token_highlights.extend(buffer_into_editor_highlights(
                                    &server_tokens,
                                    stylizer,
                                    &all_excerpts,
                                    &multi_buffer_snapshot,
                                    &mut interner,
                                    cx,
                                ));
                            }

                            // Highlights are stored sorted by start position.
                            token_highlights.sort_by(|a, b| {
                                a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                            });
                            display_map
                                .semantic_token_highlights
                                .insert(buffer_id, (Arc::from(token_highlights), Arc::new(interner)));
                        });
                    });
                }

                cx.notify();
            }).ok();
        });
    }

    /// Forces a full refetch: forgets all fetched buffer versions, then runs
    /// a regular update.
    pub(super) fn refresh_semantic_token_highlights(&mut self, cx: &mut Context<Self>) {
        self.semantic_token_state.fetched_for_buffers.clear();
        self.update_semantic_tokens(None, None, cx);
    }
}
 311
 312fn buffer_into_editor_highlights<'a, 'b>(
 313    buffer_tokens: &'a [BufferSemanticToken],
 314    stylizer: &'a SemanticTokenStylizer,
 315    all_excerpts: &'a [multi_buffer::ExcerptId],
 316    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
 317    interner: &'b mut HighlightStyleInterner,
 318    cx: &'a App,
 319) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
 320    buffer_tokens.iter().filter_map(|token| {
 321        let multi_buffer_start = all_excerpts.iter().find_map(|&excerpt_id| {
 322            multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, token.range.start)
 323        })?;
 324        let multi_buffer_end = all_excerpts.iter().find_map(|&excerpt_id| {
 325            multi_buffer_snapshot.anchor_in_excerpt(excerpt_id, token.range.end)
 326        })?;
 327
 328        let style = convert_token(
 329            stylizer,
 330            cx.theme().syntax(),
 331            token.token_type,
 332            token.token_modifiers,
 333        )?;
 334        let style = interner.intern(style);
 335        Some(SemanticTokenHighlight {
 336            range: multi_buffer_start..multi_buffer_end,
 337            style,
 338            token_type: token.token_type,
 339            token_modifiers: token.token_modifiers,
 340            server_id: stylizer.server_id(),
 341        })
 342    })
 343}
 344
/// Computes the `HighlightStyle` for a token of `token_type` carrying the
/// given modifier bitset, or `None` when no rule matches.
///
/// Every rule registered for the token type whose required modifiers are all
/// present is applied in order. For each style field the precedence is:
/// the rule's explicit value, then the field from the first theme style the
/// rule names, then whatever earlier rules accumulated.
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    let rules = stylizer.rules_for_token(token_type)?;
    // A rule matches when every modifier it requires is set on the token.
    let matching = rules.iter().filter(|rule| {
        rule.token_modifiers
            .iter()
            .all(|m| stylizer.has_modifier(modifiers, m))
    });

    let mut highlight = HighlightStyle::default();
    // Tracks whether any rule matched at all; if none did, return `None`
    // rather than a default (empty) style.
    let mut empty = true;

    for rule in matching {
        empty = false;

        // First theme style named by the rule that the theme actually defines.
        let style = rule.style.iter().find_map(|style| theme.get_opt(style));

        // Sets `highlight.$highlight_field` with the precedence described in
        // the function docs: rule value > theme style value > prior value.
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        // NOTE: `highlight.color` read below is the foreground accumulated so
        // far, including this rule's own foreground — the color overwrite
        // above runs earlier in this same iteration.
        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }

    if empty { None } else { Some(highlight) }
}
 438
 439#[cfg(test)]
 440mod tests {
 441    use std::{
 442        ops::{Deref as _, Range},
 443        sync::atomic::{self, AtomicUsize},
 444    };
 445
 446    use futures::StreamExt as _;
 447    use gpui::{
 448        AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext, VisualTestContext,
 449    };
 450    use language::{Language, LanguageConfig, LanguageMatcher};
 451    use languages::FakeLspAdapter;
 452    use multi_buffer::{
 453        AnchorRangeExt, ExcerptRange, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset,
 454    };
 455    use project::Project;
 456    use rope::Point;
 457    use serde_json::json;
 458    use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore};
 459    use workspace::{Workspace, WorkspaceHandle as _};
 460
 461    use crate::{
 462        Capability,
 463        editor_tests::{init_test, update_test_language_settings},
 464        test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
 465    };
 466
 467    use super::*;
 468
    /// With `full: Delta { delta: None }` the server cannot produce deltas,
    /// so every edit must trigger a fresh full request — even though the
    /// server hands back a `result_id`.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Counts how many full requests the fake server receives.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // Single "function" token spanning `main`.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // Edit the buffer: the follow-up query must also be a full request.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // `main` at bytes 3..7 is highlighted.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Two full requests, zero delta requests.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 546
    /// The server advertises delta support, but replies with
    /// `result_id: None` — without a previous result id the client has
    /// nothing to base a delta on, so it must keep issuing full requests.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Counts how many full requests the fake server receives.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // Single "function" token spanning `main`.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Await the editor's debounced update before editing again.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        // `main` at bytes 3..7 is highlighted.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        // Two full requests — no deltas were possible.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 622
    /// The server supports deltas and returns a `result_id`, so after the
    /// initial full request the client should switch to delta requests that
    /// reference the previous result id.
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Separate counters for full vs. delta requests.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                // Single "function" token spanning `main`.
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        // The delta handler verifies the client sends the previous result id
        // and replies with an empty edit list (tokens unchanged).
        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // After an edit, the client should ask for a delta, not a full fetch.
        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // `main` at bytes 3..7 is still highlighted.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        // Exactly one full request, then one delta request.
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
 718
 719    #[gpui::test]
 720    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
 721        init_test(cx, |_| {});
 722
 723        update_test_language_settings(cx, |language_settings| {
 724            language_settings.languages.0.insert(
 725                "TOML".into(),
 726                LanguageSettingsContent {
 727                    semantic_tokens: Some(SemanticTokens::Full),
 728                    ..LanguageSettingsContent::default()
 729                },
 730            );
 731        });
 732
 733        let toml_language = Arc::new(Language::new(
 734            LanguageConfig {
 735                name: "TOML".into(),
 736                matcher: LanguageMatcher {
 737                    path_suffixes: vec!["toml".into()],
 738                    ..LanguageMatcher::default()
 739                },
 740                ..LanguageConfig::default()
 741            },
 742            None,
 743        ));
 744
 745        // We have 2 language servers for TOML in this test.
 746        let toml_legend_1 = lsp::SemanticTokensLegend {
 747            token_types: vec!["property".into()],
 748            token_modifiers: Vec::new(),
 749        };
 750        let toml_legend_2 = lsp::SemanticTokensLegend {
 751            token_types: vec!["number".into()],
 752            token_modifiers: Vec::new(),
 753        };
 754
 755        let app_state = cx.update(workspace::AppState::test);
 756
 757        cx.update(|cx| {
 758            assets::Assets.load_test_fonts(cx);
 759            crate::init(cx);
 760            workspace::init(app_state.clone(), cx);
 761        });
 762
 763        let project = Project::test(app_state.fs.clone(), [], cx).await;
 764        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 765
 766        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
 767        let full_counter_toml_1_clone = full_counter_toml_1.clone();
 768        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
 769        let full_counter_toml_2_clone = full_counter_toml_2.clone();
 770
 771        let mut toml_server_1 = language_registry.register_fake_lsp(
 772            toml_language.name(),
 773            FakeLspAdapter {
 774                name: "toml1",
 775                capabilities: lsp::ServerCapabilities {
 776                    semantic_tokens_provider: Some(
 777                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
 778                            lsp::SemanticTokensOptions {
 779                                legend: toml_legend_1,
 780                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
 781                                ..lsp::SemanticTokensOptions::default()
 782                            },
 783                        ),
 784                    ),
 785                    ..lsp::ServerCapabilities::default()
 786                },
 787                initializer: Some(Box::new({
 788                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
 789                    move |fake_server| {
 790                        let full_counter = full_counter_toml_1_clone.clone();
 791                        fake_server
 792                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
 793                                move |_, _| {
 794                                    full_counter.fetch_add(1, atomic::Ordering::Release);
 795                                    async move {
 796                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
 797                                            lsp::SemanticTokens {
 798                                                // highlight 'a' as a property
 799                                                data: vec![
 800                                                    0, // delta_line
 801                                                    0, // delta_start
 802                                                    1, // length
 803                                                    0, // token_type
 804                                                    0, // token_modifiers_bitset
 805                                                ],
 806                                                result_id: Some("a".into()),
 807                                            },
 808                                        )))
 809                                    }
 810                                },
 811                            );
 812                    }
 813                })),
 814                ..FakeLspAdapter::default()
 815            },
 816        );
 817        let mut toml_server_2 = language_registry.register_fake_lsp(
 818            toml_language.name(),
 819            FakeLspAdapter {
 820                name: "toml2",
 821                capabilities: lsp::ServerCapabilities {
 822                    semantic_tokens_provider: Some(
 823                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
 824                            lsp::SemanticTokensOptions {
 825                                legend: toml_legend_2,
 826                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
 827                                ..lsp::SemanticTokensOptions::default()
 828                            },
 829                        ),
 830                    ),
 831                    ..lsp::ServerCapabilities::default()
 832                },
 833                initializer: Some(Box::new({
 834                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
 835                    move |fake_server| {
 836                        let full_counter = full_counter_toml_2_clone.clone();
 837                        fake_server
 838                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
 839                                move |_, _| {
 840                                    full_counter.fetch_add(1, atomic::Ordering::Release);
 841                                    async move {
 842                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
 843                                            lsp::SemanticTokens {
 844                                                // highlight '3' as a literal
 845                                                data: vec![
 846                                                    0, // delta_line
 847                                                    4, // delta_start
 848                                                    1, // length
 849                                                    0, // token_type
 850                                                    0, // token_modifiers_bitset
 851                                                ],
 852                                                result_id: Some("a".into()),
 853                                            },
 854                                        )))
 855                                    }
 856                                },
 857                            );
 858                    }
 859                })),
 860                ..FakeLspAdapter::default()
 861            },
 862        );
 863        language_registry.add(toml_language.clone());
 864
 865        app_state
 866            .fs
 867            .as_fake()
 868            .insert_tree(
 869                EditorLspTestContext::root_path(),
 870                json!({
 871                    ".git": {},
 872                    "dir": {
 873                        "foo.toml": "a = 1\nb = 2\n",
 874                    }
 875                }),
 876            )
 877            .await;
 878
 879        let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
 880        let workspace = window.root(cx).unwrap();
 881
 882        let mut cx = VisualTestContext::from_window(*window.deref(), cx);
 883        project
 884            .update(&mut cx, |project, cx| {
 885                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
 886            })
 887            .await
 888            .unwrap();
 889        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
 890            .await;
 891
 892        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
 893        let toml_item = workspace
 894            .update_in(&mut cx, |workspace, window, cx| {
 895                workspace.open_path(toml_file, None, true, window, cx)
 896            })
 897            .await
 898            .expect("Could not open test file");
 899
 900        let editor = cx.update(|_, cx| {
 901            toml_item
 902                .act_as::<Editor>(cx)
 903                .expect("Opened test file wasn't an editor")
 904        });
 905
 906        editor.update_in(&mut cx, |editor, window, cx| {
 907            let nav_history = workspace
 908                .read(cx)
 909                .active_pane()
 910                .read(cx)
 911                .nav_history_for_item(&cx.entity());
 912            editor.set_nav_history(Some(nav_history));
 913            window.focus(&editor.focus_handle(cx), cx)
 914        });
 915
 916        let _toml_server_1 = toml_server_1.next().await.unwrap();
 917        let _toml_server_2 = toml_server_2.next().await.unwrap();
 918
 919        // Trigger semantic tokens.
 920        editor.update_in(&mut cx, |editor, _, cx| {
 921            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
 922        });
 923        cx.executor().advance_clock(Duration::from_millis(200));
 924        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
 925        cx.run_until_parked();
 926        task.await;
 927
 928        assert_eq!(
 929            extract_semantic_highlights(&editor, &cx),
 930            vec![
 931                MultiBufferOffset(0)..MultiBufferOffset(1),
 932                MultiBufferOffset(4)..MultiBufferOffset(5),
 933            ]
 934        );
 935
 936        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
 937        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
 938    }
 939
    // Multibuffer test: semantic-token highlights should be clipped to the
    // visible excerpt range, and expanding an excerpt should surface the
    // tokens that become newly visible. Uses one TOML and one Rust excerpt.
    #[gpui::test]
    async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Opt both languages into full semantic tokens via settings.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Minimal language definitions, matched by file extension only.
        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));
        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // Each legend declares a single token type, so token_type index 0 in
        // the encoded response data maps to "property" / "constant".
        let toml_legend = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let rust_legend = lsp::SemanticTokensLegend {
            token_types: vec!["constant".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        // Counts SemanticTokensFull requests received by the fake TOML server.
        let full_counter_toml = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_clone = full_counter_toml.clone();

        // Fake TOML server: reports 'a', 'b', 'c' (one per line) as properties
        // using the LSP delta-encoded token format, and bumps the counter on
        // every full-tokens request.
        let mut toml_server = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_clone = full_counter_toml_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
                                                data: vec![
                                                    0, // delta_line (line 0)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 1)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 2)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());
        // Fake Rust server: advertises semantic-token support but installs no
        // request handler, so its responses play no part in the assertions.
        let mut rust_server = language_registry.register_fake_lsp(
            rust_language.name(),
            FakeLspAdapter {
                name: "rust",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: rust_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(rust_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\nc = 3\n",
                        "bar.rs": "const c: usize = 3;\n",
                    }
                }),
            )
            .await;

        let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
        let workspace = window.root(cx).unwrap();

        let mut cx = VisualTestContext::from_window(*window.deref(), cx);
        project
            .update(&mut cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        // NOTE(review): index 1 is assumed to be "foo.toml" and index 0
        // "bar.rs" — presumably the project paths are sorted; confirm if
        // the fixture file names change.
        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
        let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let (toml_item, rust_item) = workspace.update_in(&mut cx, |workspace, window, cx| {
            (
                workspace.open_path(toml_file, None, true, window, cx),
                workspace.open_path(rust_file, None, true, window, cx),
            )
        });
        let toml_item = toml_item.await.expect("Could not open test file");
        let rust_item = rust_item.await.expect("Could not open test file");

        let (toml_editor, rust_editor) = cx.update(|_, cx| {
            (
                toml_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
                rust_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
            )
        });
        let toml_buffer = cx.read(|cx| {
            toml_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        let rust_buffer = cx.read(|cx| {
            rust_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        // Build a multibuffer with one single-line excerpt from each file.
        let multibuffer = cx.new(|cx| {
            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
            multibuffer.push_excerpts(
                toml_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
                cx,
            );
            multibuffer.push_excerpts(
                rust_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
                cx,
            );
            multibuffer
        });

        let editor = workspace.update_in(&mut cx, |workspace, window, cx| {
            let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
            workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
            editor
        });
        editor.update_in(&mut cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait for both fake servers to start.
        let _toml_server = toml_server.next().await.unwrap();
        let _rust_server = rust_server.next().await.unwrap();

        // Initial request.
        // Advancing the clock is presumably needed to get past the semantic
        // tokens debounce interval before the update task will fire — TODO
        // confirm against SemanticTokenState's scheduling.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
        cx.run_until_parked();

        // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
        // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
        );

        // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
        let toml_excerpt_id =
            editor.read_with(&cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]);
        editor.update_in(&mut cx, |editor, _, cx| {
            editor.buffer().update(cx, |buffer, cx| {
                buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx);
            });
        });

        // Wait for semantic tokens to be re-fetched after expansion.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
        // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
        // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(6)..MultiBufferOffset(7),
                MultiBufferOffset(12)..MultiBufferOffset(13),
            ]
        );
    }
1224
    // Shared-buffer test: a multibuffer containing two excerpts of the SAME
    // underlying buffer should debounce semantic-token fetches, so edits that
    // touch both excerpts trigger only one additional SemanticTokensFull
    // request instead of one per excerpt/editor.
    #[gpui::test]
    async fn lsp_semantic_tokens_multibuffer_shared(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Opt TOML into full semantic tokens via settings.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Minimal language definition, matched by file extension only.
        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // Single token type, so token_type index 0 means "property".
        let toml_legend = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        // Counts SemanticTokensFull requests received by the fake server.
        let full_counter_toml = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_clone = full_counter_toml.clone();

        // Fake TOML server: always reports a single one-character "property"
        // token at line 0, column 0 (the 'a'), and bumps the request counter.
        let mut toml_server = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_clone = full_counter_toml_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
        let workspace = window.root(cx).unwrap();

        let mut cx = VisualTestContext::from_window(*window.deref(), cx);
        project
            .update(&mut cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(&mut cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let toml_editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });
        let toml_buffer = cx.read(|cx| {
            toml_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        // Two excerpts over the same buffer and the same range — the scenario
        // in which redundant per-excerpt fetches would occur without debouncing.
        let multibuffer = cx.new(|cx| {
            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
            multibuffer.push_excerpts(
                toml_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
                cx,
            );
            multibuffer.push_excerpts(
                toml_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
                cx,
            );
            multibuffer
        });

        let editor = workspace.update_in(&mut cx, |_, window, cx| {
            cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx))
        });
        editor.update_in(&mut cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Wait for the fake server to start.
        let _toml_server = toml_server.next().await.unwrap();

        // Initial request.
        // Advance past the (presumed) semantic-tokens debounce interval so the
        // first fetch runs — TODO confirm against SemanticTokenState's scheduling.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);

        // Edit two parts of the multibuffer, which both map to the same buffer.
        //
        // Without debouncing, this grabs semantic tokens 4 times (twice for the
        // toml editor, and twice for the multibuffer).
        editor.update_in(&mut cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
            editor.edit([(MultiBufferOffset(12)..MultiBufferOffset(13), "c")], cx);
        });
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(&mut cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
        );

        // Exactly one additional request despite edits landing in both excerpts.
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 2);
    }
1421
1422    fn extract_semantic_highlights(
1423        editor: &Entity<Editor>,
1424        cx: &TestAppContext,
1425    ) -> Vec<Range<MultiBufferOffset>> {
1426        editor.read_with(cx, |editor, cx| {
1427            let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1428            editor
1429                .display_map
1430                .read(cx)
1431                .semantic_token_highlights
1432                .iter()
1433                .flat_map(|(_, (v, _))| v.iter())
1434                .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1435                .collect()
1436        })
1437    }
1438
    // Verifies that changing `global_lsp_settings.semantic_token_rules` in the
    // user settings restyles semantic tokens: after the settings change and a
    // refetch, the highlight carries the rule's custom foreground color.
    #[gpui::test]
    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust so the editor issues requests.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertises a single token type ("function", index 0)
        // with full + delta support.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Every full request yields one "function" token: line 0, col 3,
        // length 4 — i.e. the `main` identifier in `fn main() {}`.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type (function)
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None,
                            },
                        )))
                    }
                },
            );

        // Trigger initial semantic tokens fetch
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Verify initial highlights exist (with no custom color yet)
        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
        assert_eq!(
            initial_ranges,
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Should have initial semantic token highlights"
        );
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Initial color should be None or theme default (not red or blue)
        let initial_color = initial_styles[0].color;

        // Set a custom foreground color for function tokens via settings.json
        let red_color = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        };
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: Vec::from([SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                foreground_color: Some(red_color),
                                ..SemanticTokenRule::default()
                            }]),
                        }),
                        ..GlobalLspSettingsContent::default()
                    });
                });
            });
        });

        // Trigger a refetch by making an edit (which forces semantic tokens update)
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        // Verify the highlights now have the custom red color
        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(
            styles_after_settings_change.len(),
            1,
            "Should still have one highlight"
        );
        assert_eq!(
            styles_after_settings_change[0].color,
            Some(Hsla::from(red_color)),
            "Highlight should have the custom red color from settings.json"
        );
        assert_ne!(
            styles_after_settings_change[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1559
    // Verifies that `theme.experimental_theme_overrides` restyles existing
    // semantic tokens without an LSP refetch: setting an override recolors the
    // highlight, changing it recolors again, and clearing it reverts to the
    // original theme color.
    #[gpui::test]
    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust so the editor issues requests.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertises a single "function" token type (index 0).
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Single "function" token covering `main` (line 0, col 3, len 4).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and capture the baseline color.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Changing experimental_theme_overrides triggers GlobalTheme reload,
        // which fires theme_changed → refresh_semantic_token_highlights.
        let red_color: Hsla = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#ff0000".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        // Advance past the refresh delay so the restyle task runs
        // (presumably a debounce — verify against the editor's refresh logic).
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(red_color),
            "Highlight should have red color from theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );

        // Changing the override to a different color also restyles.
        let blue_color: Hsla = Rgba {
            r: 0.0,
            g: 0.0,
            b: 1.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#0000ff".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_second_override.len(), 1);
        assert_eq!(
            styles_after_second_override[0].color,
            Some(blue_color),
            "Highlight should have blue color from updated theme override"
        );

        // Removing overrides reverts to the original theme color.
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = None;
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_clear.len(), 1);
        assert_eq!(
            styles_after_clear[0].color, initial_color,
            "Highlight should revert to initial color after clearing overrides"
        );
    }
1724
    // Verifies that per-theme overrides (`theme.theme_overrides`, keyed by the
    // active theme's name) restyle existing semantic tokens, same as the
    // experimental global overrides.
    #[gpui::test]
    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};
        use ui::ActiveTheme as _;

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust so the editor issues requests.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertises a single "function" token type (index 0).
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Single "function" token covering `main` (line 0, col 3, len 4).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and capture the baseline color.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Per-theme overrides (theme_overrides keyed by theme name) also go through
        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
        let green_color: Hsla = Rgba {
            r: 0.0,
            g: 1.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
                        theme_name.clone(),
                        ThemeStyleContent {
                            syntax: IndexMap::from_iter([(
                                "function".to_string(),
                                HighlightStyleContent {
                                    color: Some("#00ff00".to_string()),
                                    background_color: None,
                                    font_style: None,
                                    font_weight: None,
                                },
                            )]),
                            ..ThemeStyleContent::default()
                        },
                    )]);
                });
            });
        });

        // Advance past the refresh delay so the restyle task runs
        // (presumably a debounce — verify against the editor's refresh logic).
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(green_color),
            "Highlight should have green color from per-theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1837
    // Verifies that stopping the language server(s) for a buffer removes its
    // semantic token highlights from the editor's display map.
    #[gpui::test]
    async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust so the editor issues requests.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertises a single "function" token type (index 0).
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Single token covering `main` (line 0, col 3, len 4).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and confirm the highlight is present.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before stopping the server"
        );

        // Stop every language server attached to the editor's buffers.
        cx.update_editor(|editor, _, cx| {
            let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
            editor.project.as_ref().unwrap().update(cx, |project, cx| {
                project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
            })
        });
        // Let shutdown and the subsequent highlight cleanup settle.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after stopping the server"
        );
    }
1915
    // Verifies that flipping the per-language `semantic_tokens` setting from
    // Full to Off clears existing highlights without needing a server restart.
    #[gpui::test]
    async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust so the editor issues requests.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake server advertises a single "function" token type (index 0).
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Single token covering `main` (line 0, col 3, len 4).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and confirm the highlight is present.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before disabling the setting"
        );

        // Turn the feature off for Rust; this should clear the highlights.
        update_test_language_settings(&mut cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Off),
                    ..LanguageSettingsContent::default()
                },
            );
        });
        // Let the settings-change handling settle before asserting.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after disabling the setting"
        );
    }
1996
1997    fn extract_semantic_highlight_styles(
1998        editor: &Entity<Editor>,
1999        cx: &TestAppContext,
2000    ) -> Vec<HighlightStyle> {
2001        editor.read_with(cx, |editor, cx| {
2002            editor
2003                .display_map
2004                .read(cx)
2005                .semantic_token_highlights
2006                .iter()
2007                .flat_map(|(_, (v, interner))| {
2008                    v.iter().map(|highlights| interner[highlights.style])
2009                })
2010                .collect()
2011        })
2012    }
2013}