semantic_tokens.rs

   1use std::{collections::hash_map, sync::Arc, time::Duration};
   2
   3use collections::{HashMap, HashSet};
   4use futures::future::join_all;
   5use gpui::{
   6    App, Context, FontStyle, FontWeight, HighlightStyle, StrikethroughStyle, Task, UnderlineStyle,
   7};
   8use itertools::Itertools;
   9use language::language_settings::language_settings;
  10use project::{
  11    lsp_store::{
  12        BufferSemanticToken, BufferSemanticTokens, RefreshForServer, SemanticTokenStylizer,
  13        TokenType,
  14    },
  15    project_settings::ProjectSettings,
  16};
  17use settings::{
  18    SemanticTokenColorOverride, SemanticTokenFontStyle, SemanticTokenFontWeight,
  19    SemanticTokenRules, Settings as _,
  20};
  21use text::BufferId;
  22use theme::SyntaxTheme;
  23use ui::ActiveTheme as _;
  24
  25use crate::{
  26    Editor,
  27    actions::ToggleSemanticHighlights,
  28    display_map::{HighlightStyleInterner, SemanticTokenHighlight},
  29};
  30
/// Per-editor state for LSP semantic-token highlighting.
pub(super) struct SemanticTokenState {
    /// Styling rules currently in effect, sourced from the global LSP
    /// settings; compared against incoming settings in `update_rules`.
    rules: SemanticTokenRules,
    /// Whether semantic highlighting is turned on for this editor.
    enabled: bool,
    /// The in-flight debounce/fetch task; replaced wholesale whenever a new
    /// refresh starts (dropping the old task cancels it).
    update_task: Task<()>,
    /// For each buffer, the buffer version at which tokens were last
    /// fetched — used to skip re-querying unchanged buffers.
    fetched_for_buffers: HashMap<BufferId, clock::Global>,
}
  37
  38impl SemanticTokenState {
  39    pub(super) fn new(cx: &App, enabled: bool) -> Self {
  40        Self {
  41            rules: ProjectSettings::get_global(cx)
  42                .global_lsp_settings
  43                .semantic_token_rules
  44                .clone(),
  45            enabled,
  46            update_task: Task::ready(()),
  47            fetched_for_buffers: HashMap::default(),
  48        }
  49    }
  50
  51    pub(super) fn enabled(&self) -> bool {
  52        self.enabled
  53    }
  54
  55    pub(super) fn toggle_enabled(&mut self) {
  56        self.enabled = !self.enabled;
  57    }
  58
  59    #[cfg(test)]
  60    pub(super) fn take_update_task(&mut self) -> Task<()> {
  61        std::mem::replace(&mut self.update_task, Task::ready(()))
  62    }
  63
  64    pub(super) fn invalidate_buffer(&mut self, buffer_id: &BufferId) {
  65        self.fetched_for_buffers.remove(buffer_id);
  66    }
  67
  68    pub(super) fn update_rules(&mut self, new_rules: SemanticTokenRules) -> bool {
  69        if new_rules != self.rules {
  70            self.rules = new_rules;
  71            true
  72        } else {
  73            false
  74        }
  75    }
  76}
  77
impl Editor {
    /// Reports whether any buffer in this editor's multi-buffer has a
    /// semantics provider that can supply semantic tokens for it.
    pub fn supports_semantic_tokens(&self, cx: &mut App) -> bool {
        let Some(provider) = self.semantics_provider.as_ref() else {
            return false;
        };

        // True if at least one of the constituent buffers is supported.
        let mut supports = false;
        self.buffer().update(cx, |this, cx| {
            this.for_each_buffer(&mut |buffer| {
                supports |= provider.supports_semantic_tokens(buffer, cx);
            });
        });

        supports
    }

    /// Whether semantic highlighting is currently enabled for this editor.
    pub fn semantic_highlights_enabled(&self) -> bool {
        self.semantic_token_state.enabled()
    }

    /// Action handler: flips semantic highlighting on/off, then drops all
    /// cached fetch versions and triggers a refresh (which, when now
    /// disabled, clears the existing highlights instead of fetching).
    pub fn toggle_semantic_highlights(
        &mut self,
        _: &ToggleSemanticHighlights,
        _window: &mut gpui::Window,
        cx: &mut Context<Self>,
    ) {
        self.semantic_token_state.toggle_enabled();
        self.invalidate_semantic_tokens(None);
        self.refresh_semantic_tokens(None, None, cx);
    }

    /// Drops the cached "already fetched" version for one buffer — or for
    /// all buffers when `for_buffer` is `None` — so the next refresh
    /// re-queries the server(s).
    pub(super) fn invalidate_semantic_tokens(&mut self, for_buffer: Option<BufferId>) {
        match for_buffer {
            Some(for_buffer) => self.semantic_token_state.invalidate_buffer(&for_buffer),
            None => self.semantic_token_state.fetched_for_buffers.clear(),
        }
    }

    /// Fetches semantic tokens for the visible buffers (plus `buffer_id`,
    /// if given) and installs them as display-map highlights.
    ///
    /// When `for_server` is set, all cached fetch versions are dropped and
    /// the affected buffers' highlights are invalidated before applying the
    /// fresh results. The actual fetch runs in a spawned task after a 50 ms
    /// debounce; starting a new refresh replaces (and thereby cancels) any
    /// previous in-flight task.
    pub(super) fn refresh_semantic_tokens(
        &mut self,
        buffer_id: Option<BufferId>,
        for_server: Option<RefreshForServer>,
        cx: &mut Context<Self>,
    ) {
        // Disabled (or not a full editor): clear everything and bail.
        if !self.mode().is_full() || !self.semantic_token_state.enabled() {
            self.invalidate_semantic_tokens(None);
            self.display_map.update(cx, |display_map, _| {
                display_map.semantic_token_highlights.clear();
            });
            self.semantic_token_state.update_task = Task::ready(());
            cx.notify();
            return;
        }

        // A server-initiated refresh invalidates every previously fetched
        // buffer; remember them so their stale highlights can be removed
        // once the new results arrive.
        let mut invalidate_semantic_highlights_for_buffers = HashSet::default();
        if for_server.is_some() {
            invalidate_semantic_highlights_for_buffers.extend(
                self.semantic_token_state
                    .fetched_for_buffers
                    .drain()
                    .map(|(buffer_id, _)| buffer_id),
            );
        }

        let Some((sema, project)) = self.semantics_provider.clone().zip(self.project.clone())
        else {
            return;
        };

        // Candidate buffers: everything currently visible, plus the
        // explicitly requested buffer — filtered down to registered buffers
        // whose language settings enable semantic tokens.
        let buffers_to_query = self
            .visible_excerpts(true, cx)
            .into_values()
            .map(|(buffer, ..)| buffer)
            .chain(buffer_id.and_then(|buffer_id| self.buffer.read(cx).buffer(buffer_id)))
            .filter_map(|editor_buffer| {
                let editor_buffer_id = editor_buffer.read(cx).remote_id();
                if self.registered_buffers.contains_key(&editor_buffer_id)
                    && language_settings(
                        editor_buffer.read(cx).language().map(|l| l.name()),
                        editor_buffer.read(cx).file(),
                        cx,
                    )
                    .semantic_tokens
                    .enabled()
                {
                    Some((editor_buffer_id, editor_buffer))
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Buffers that still have highlights but are no longer queried and
        // whose settings have semantic tokens disabled: drop their state
        // and highlights now.
        for buffer_with_disabled_tokens in self
            .display_map
            .read(cx)
            .semantic_token_highlights
            .iter()
            .map(|(buffer_id, _)| *buffer_id)
            .filter(|buffer_id| !buffers_to_query.contains_key(buffer_id))
            .filter(|buffer_id| {
                !self
                    .buffer
                    .read(cx)
                    .buffer(*buffer_id)
                    .is_some_and(|buffer| {
                        let buffer = buffer.read(cx);
                        language_settings(buffer.language().map(|l| l.name()), buffer.file(), cx)
                            .semantic_tokens
                            .enabled()
                    })
            })
            .collect::<Vec<_>>()
        {
            self.semantic_token_state
                .invalidate_buffer(&buffer_with_disabled_tokens);
            self.display_map.update(cx, |display_map, _| {
                display_map.invalidate_semantic_highlights(buffer_with_disabled_tokens);
            });
        }

        self.semantic_token_state.update_task = cx.spawn(async move |editor, cx| {
            // Debounce rapid refreshes (typing, scrolling) for 50 ms.
            cx.background_executor()
                .timer(Duration::from_millis(50))
                .await;
            // Kick off one fetch task per buffer whose version changed
            // since the last successful fetch.
            let Some(all_semantic_tokens_task) = editor
                .update(cx, |editor, cx| {
                    buffers_to_query
                        .into_iter()
                        .filter_map(|(buffer_id, buffer)| {
                            let known_version = editor
                                .semantic_token_state
                                .fetched_for_buffers
                                .get(&buffer_id);
                            let query_version = buffer.read(cx).version();
                            if known_version.is_some_and(|known_version| {
                                !query_version.changed_since(known_version)
                            }) {
                                // Already fetched at this version; skip.
                                None
                            } else {
                                let task = sema.semantic_tokens(buffer, for_server, cx);
                                Some(async move { (buffer_id, query_version, task.await) })
                            }
                        })
                        .collect::<Vec<_>>()
                })
                .ok()
            else {
                return;
            };

            let all_semantic_tokens = join_all(all_semantic_tokens_task).await;
            editor
                .update(cx, |editor, cx| {
                    // Remove highlights for buffers invalidated by a
                    // server-initiated refresh before installing new ones.
                    editor.display_map.update(cx, |display_map, _| {
                        for buffer_id in invalidate_semantic_highlights_for_buffers {
                            display_map.invalidate_semantic_highlights(buffer_id);
                            editor.semantic_token_state.invalidate_buffer(&buffer_id);
                        }
                    });

                    if all_semantic_tokens.is_empty() {
                        return;
                    }
                    let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);

                    for (buffer_id, query_version, tokens) in all_semantic_tokens {
                        let tokens = match tokens {
                            Ok(BufferSemanticTokens {
                                tokens: Some(tokens),
                            }) => tokens,
                            // A `None` payload means the server has no
                            // tokens for this buffer: clear its highlights.
                            Ok(BufferSemanticTokens { tokens: None }) => {
                                editor.display_map.update(cx, |display_map, _| {
                                    display_map.invalidate_semantic_highlights(buffer_id);
                                });
                                continue;
                            }
                            Err(e) => {
                                log::error!(
                                    "Failed to fetch semantic tokens for buffer \
                                    {buffer_id:?}: {e:#}"
                                );
                                continue;
                            }
                        };

                        // Record the fetched version; skip stale results
                        // (a newer fetch already landed for this buffer).
                        match editor
                            .semantic_token_state
                            .fetched_for_buffers
                            .entry(buffer_id)
                        {
                            hash_map::Entry::Occupied(mut o) => {
                                if query_version.changed_since(o.get()) {
                                    o.insert(query_version);
                                } else {
                                    continue;
                                }
                            }
                            hash_map::Entry::Vacant(v) => {
                                v.insert(query_version);
                            }
                        }

                        let language_name = editor
                            .buffer()
                            .read(cx)
                            .buffer(buffer_id)
                            .and_then(|buf| buf.read(cx).language().map(|l| l.name()));

                        // Convert each server's tokens into styled,
                        // multi-buffer-anchored highlights and install the
                        // merged, sorted result for this buffer.
                        editor.display_map.update(cx, |display_map, cx| {
                            project.read(cx).lsp_store().update(cx, |lsp_store, cx| {
                                let mut token_highlights = Vec::new();
                                let mut interner = HighlightStyleInterner::default();
                                for (server_id, server_tokens) in tokens {
                                    let Some(stylizer) = lsp_store.get_or_create_token_stylizer(
                                        server_id,
                                        language_name.as_ref(),
                                        cx,
                                    ) else {
                                        continue;
                                    };
                                    token_highlights.reserve(2 * server_tokens.len());
                                    token_highlights.extend(buffer_into_editor_highlights(
                                        &server_tokens,
                                        stylizer,
                                        &multi_buffer_snapshot,
                                        &mut interner,
                                        cx,
                                    ));
                                }

                                token_highlights.sort_by(|a, b| {
                                    a.range.start.cmp(&b.range.start, &multi_buffer_snapshot)
                                });
                                display_map.semantic_token_highlights.insert(
                                    buffer_id,
                                    (Arc::from(token_highlights), Arc::new(interner)),
                                );
                            });
                        });
                    }

                    cx.notify();
                })
                .ok();
        });
    }
}
 325
/// Converts one server's buffer-local semantic tokens into multi-buffer
/// anchored, style-resolved highlights.
///
/// All token endpoints are translated to multi-buffer anchors in a single
/// batch (two anchors per token, re-paired with `tuples`); tokens with an
/// endpoint that is not visible in the multi-buffer, or whose type/modifier
/// combination yields no style, are dropped.
fn buffer_into_editor_highlights<'a, 'b>(
    buffer_tokens: &'a [BufferSemanticToken],
    stylizer: &'a SemanticTokenStylizer,
    multi_buffer_snapshot: &'a multi_buffer::MultiBufferSnapshot,
    interner: &'b mut HighlightStyleInterner,
    cx: &'a App,
) -> impl Iterator<Item = SemanticTokenHighlight> + use<'a, 'b> {
    multi_buffer_snapshot
        // Flatten to [start0, end0, start1, end1, ...] so all anchors are
        // resolved in one call, then re-pair below.
        .text_anchors_to_visible_anchors(
            buffer_tokens
                .iter()
                .flat_map(|token| [token.range.start, token.range.end]),
        )
        .into_iter()
        .tuples::<(_, _)>()
        // The pairing relies on anchors coming back in input order, one
        // (start, end) pair per token — so zipping restores the alignment.
        .zip(buffer_tokens)
        .filter_map(|((multi_buffer_start, multi_buffer_end), token)| {
            // Skip tokens not currently visible in the multi-buffer.
            let range = multi_buffer_start?..multi_buffer_end?;
            let style = convert_token(
                stylizer,
                cx.theme().syntax(),
                token.token_type,
                token.token_modifiers,
            )?;
            // Intern so identical styles share one allocation/id.
            let style = interner.intern(style);
            Some(SemanticTokenHighlight {
                range,
                style,
                token_type: token.token_type,
                token_modifiers: token.token_modifiers,
                server_id: stylizer.server_id(),
            })
        })
}
 360
/// Resolves the final [`HighlightStyle`] for one semantic token by merging
/// every stylizer rule that matches the token's type and modifier bitset.
///
/// Rules are applied in order; for each style field, a later matching rule
/// overrides earlier ones. Returns `None` when no rule matches at all.
fn convert_token(
    stylizer: &SemanticTokenStylizer,
    theme: &SyntaxTheme,
    token_type: TokenType,
    modifiers: u32,
) -> Option<HighlightStyle> {
    // No rules registered for this token type -> no highlight.
    let rules = stylizer.rules_for_token(token_type)?;
    // Keep only rules whose required modifiers are all set on the token.
    let matching = rules.iter().filter(|rule| {
        rule.token_modifiers
            .iter()
            .all(|m| stylizer.has_modifier(modifiers, m))
    });

    let mut highlight = HighlightStyle::default();
    let mut empty = true;

    for rule in matching {
        empty = false;

        // First theme syntax style named by the rule that the theme knows.
        let style = rule.style.iter().find_map(|style| theme.get_opt(style));

        // Sets one field of `highlight`, preferring (in order): the rule's
        // explicit override, the resolved theme style's value, then the
        // value an earlier rule already set.
        macro_rules! overwrite {
            (
                highlight.$highlight_field:ident,
                SemanticTokenRule::$rule_field:ident,
                $transform:expr $(,)?
            ) => {
                highlight.$highlight_field = rule
                    .$rule_field
                    .map($transform)
                    .or_else(|| style.and_then(|s| s.$highlight_field))
                    .or(highlight.$highlight_field)
            };
        }

        overwrite!(
            highlight.color,
            SemanticTokenRule::foreground_color,
            Into::into,
        );

        overwrite!(
            highlight.background_color,
            SemanticTokenRule::background_color,
            Into::into,
        );

        overwrite!(
            highlight.font_weight,
            SemanticTokenRule::font_weight,
            |w| match w {
                SemanticTokenFontWeight::Normal => FontWeight::NORMAL,
                SemanticTokenFontWeight::Bold => FontWeight::BOLD,
            },
        );

        overwrite!(
            highlight.font_style,
            SemanticTokenRule::font_style,
            |s| match s {
                SemanticTokenFontStyle::Normal => FontStyle::Normal,
                SemanticTokenFontStyle::Italic => FontStyle::Italic,
            },
        );

        // NOTE: `InheritForeground` reads `highlight.color` as resolved so
        // far — i.e. the foreground accumulated from rules applied up to
        // (and including) this one's color overwrite above.
        overwrite!(highlight.underline, SemanticTokenRule::underline, |u| {
            UnderlineStyle {
                thickness: 1.0.into(),
                color: match u {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
                ..UnderlineStyle::default()
            }
        });

        overwrite!(
            highlight.strikethrough,
            SemanticTokenRule::strikethrough,
            |s| StrikethroughStyle {
                thickness: 1.0.into(),
                color: match s {
                    SemanticTokenColorOverride::InheritForeground(true) => highlight.color,
                    SemanticTokenColorOverride::InheritForeground(false) => None,
                    SemanticTokenColorOverride::Replace(c) => Some(c.into()),
                },
            },
        );
    }

    if empty { None } else { Some(highlight) }
}
 454
 455#[cfg(test)]
 456mod tests {
 457    use std::{
 458        ops::Range,
 459        sync::atomic::{self, AtomicUsize},
 460    };
 461
 462    use futures::StreamExt as _;
 463    use gpui::{AppContext as _, Entity, Focusable as _, HighlightStyle, TestAppContext};
 464    use language::{Language, LanguageConfig, LanguageMatcher};
 465    use languages::FakeLspAdapter;
 466    use multi_buffer::{
 467        AnchorRangeExt, ExcerptRange, ExpandExcerptDirection, MultiBuffer, MultiBufferOffset,
 468    };
 469    use project::Project;
 470    use rope::Point;
 471    use serde_json::json;
 472    use settings::{LanguageSettingsContent, SemanticTokenRules, SemanticTokens, SettingsStore};
 473    use workspace::{MultiWorkspace, WorkspaceHandle as _};
 474
 475    use crate::{
 476        Capability,
 477        editor_tests::{init_test, update_test_language_settings},
 478        test::{build_editor_with_project, editor_lsp_test_context::EditorLspTestContext},
 479    };
 480
 481    use super::*;
 482
    // A server that advertises full-only support (`delta: None`) must be
    // sent a full request on every change, even when it returns a
    // `result_id` — two edits should produce exactly two full requests.
    #[gpui::test]
    async fn lsp_semantic_tokens_full_capability(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Counts how many full-token requests the fake server receives.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                // The server isn't capable of deltas, so even though we sent back
                                // a result ID, the client shouldn't request a delta.
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // A second edit should trigger another *full* request.
        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        cx.run_until_parked();

        // The single token (delta_start 3, length 4) maps to offsets 3..7.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 560
    // A server that supports deltas but returns `result_id: None` gives the
    // client nothing to base a delta on — every change must fall back to a
    // full request (two edits -> two full requests).
    #[gpui::test]
    async fn lsp_semantic_tokens_full_none_result_id(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            // Delta support is advertised here, unlike the
                            // full-only test above.
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None, // Sending back `None` forces the client to not use deltas.
                            },
                        )))
                    }
                },
            );

        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());

        // Await the editor's update task so highlights are fully applied.
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        cx.set_state("ˇfn main() { a }");
        assert!(full_request.next().await.is_some());

        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;
        // Token (delta_start 3, length 4) maps to offsets 3..7.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );
        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 2);
    }
 636
    // When the server supports deltas and returns a `result_id`, the second
    // fetch must be a delta request carrying the previous result id —
    // exactly one full and one delta request across two edits.
    #[gpui::test]
    async fn lsp_semantic_tokens_delta(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable semantic tokens for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: Some(true) }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // Separate counters for full vs delta requests.
        let full_counter = Arc::new(AtomicUsize::new(0));
        let full_counter_clone = full_counter.clone();
        let delta_counter = Arc::new(AtomicUsize::new(0));
        let delta_counter_clone = delta_counter.clone();

        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    full_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type
                                    0, // token_modifiers_bitset
                                ],
                                result_id: Some("a".into()),
                            },
                        )))
                    }
                },
            );

        // Delta handler: verifies the client echoes the previous result id
        // and replies with an empty edit list (tokens unchanged).
        let mut delta_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullDeltaRequest, _, _>(
                move |_, params, _| {
                    delta_counter_clone.fetch_add(1, atomic::Ordering::Release);
                    assert_eq!(params.previous_result_id, "a");
                    async move {
                        Ok(Some(lsp::SemanticTokensFullDeltaResult::TokensDelta(
                            lsp::SemanticTokensDelta {
                                edits: Vec::new(),
                                result_id: Some("b".into()),
                            },
                        )))
                    }
                },
            );

        // Initial request, for the empty buffer.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Second edit should go through the delta path.
        cx.set_state("ˇfn main() { a }");
        assert!(delta_request.next().await.is_some());
        let task = cx.update_editor(|e, _, _| e.semantic_token_state.take_update_task());
        task.await;

        // Token (delta_start 3, length 4) maps to offsets 3..7.
        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)]
        );

        assert_eq!(full_counter.load(atomic::Ordering::Acquire), 1);
        assert_eq!(delta_counter.load(atomic::Ordering::Acquire), 1);
    }
 732
    // Verifies that when two language servers both serve semantic tokens for the
    // same buffer, each server receives exactly one full-token request and the
    // highlights from both servers are merged into the editor's display.
    #[gpui::test]
    async fn lsp_semantic_tokens_multiserver_full(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for TOML buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // We have 2 language servers for TOML in this test.
        // Each server declares a different single-entry legend, so token_type 0
        // means "property" for server 1 and "number" for server 2.
        let toml_legend_1 = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let toml_legend_2 = lsp::SemanticTokensLegend {
            token_types: vec!["number".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());

        // Counters track how many SemanticTokensFullRequest calls each fake
        // server receives, so we can assert no duplicate fetches happen.
        let full_counter_toml_1 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_1_clone = full_counter_toml_1.clone();
        let full_counter_toml_2 = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_2_clone = full_counter_toml_2.clone();

        let mut toml_server_1 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml1",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_1,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_1_clone = full_counter_toml_1_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_1_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        let mut toml_server_2 = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml2",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend_2,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_2_clone = full_counter_toml_2_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_2_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight '1' (token_type 0 = "number" in this server's legend)
                                                data: vec![
                                                    0, // delta_line
                                                    4, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });

        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        // Both fake servers must have started before we edit the buffer.
        let _toml_server_1 = toml_server_1.next().await.unwrap();
        let _toml_server_2 = toml_server_2.next().await.unwrap();

        // Trigger semantic tokens.
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
        });
        // Advance past the fetch debounce, then drive the update task to
        // completion before asserting. NOTE(review): 200ms is presumably
        // beyond the debounce interval — confirm against the debounce constant.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // Merged highlights: offsets 0..1 from server 1 and 4..5 from server 2.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(4)..MultiBufferOffset(5),
            ]
        );

        // Exactly one full request per server.
        assert_eq!(full_counter_toml_1.load(atomic::Ordering::Acquire), 1);
        assert_eq!(full_counter_toml_2.load(atomic::Ordering::Acquire), 1);
    }
 952
    // Verifies that in a multibuffer only tokens inside the visible excerpt are
    // highlighted, and that expanding an excerpt reveals the additional tokens
    // without issuing extra full-token requests to the server.
    #[gpui::test]
    async fn lsp_semantic_tokens_multibuffer_part(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for both languages in the multibuffer.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));
        let rust_language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // Single-entry legends: token_type 0 is "property" for TOML, "constant" for Rust.
        let toml_legend = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };
        let rust_legend = lsp::SemanticTokensLegend {
            token_types: vec!["constant".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        // Counts SemanticTokensFullRequest calls to the TOML server.
        let full_counter_toml = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_clone = full_counter_toml.clone();

        let mut toml_server = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_clone = full_counter_toml_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a', 'b', 'c' as properties on lines 0, 1, 2
                                                data: vec![
                                                    0, // delta_line (line 0)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 1)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                    1, // delta_line (line 2)
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());
        // The Rust server advertises token support but registers no handler; it
        // exists so the second excerpt's language is also token-capable.
        let mut rust_server = language_registry.register_fake_lsp(
            rust_language.name(),
            FakeLspAdapter {
                name: "rust",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: rust_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(rust_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\nc = 3\n",
                        "bar.rs": "const c: usize = 3;\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        // NOTE(review): assumes project-path ordering puts "bar.rs" at index 0
        // and "foo.toml" at index 1 — confirm against file_project_paths ordering.
        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[1].clone());
        let rust_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let (toml_item, rust_item) = workspace.update_in(cx, |workspace, window, cx| {
            (
                workspace.open_path(toml_file, None, true, window, cx),
                workspace.open_path(rust_file, None, true, window, cx),
            )
        });
        let toml_item = toml_item.await.expect("Could not open test file");
        let rust_item = rust_item.await.expect("Could not open test file");

        let (toml_editor, rust_editor) = cx.update(|_, cx| {
            (
                toml_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
                rust_item
                    .act_as::<Editor>(cx)
                    .expect("Opened test file wasn't an editor"),
            )
        });
        let toml_buffer = cx.read(|cx| {
            toml_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        let rust_buffer = cx.read(|cx| {
            rust_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        // Build a multibuffer with a one-line excerpt from each file.
        let multibuffer = cx.new(|cx| {
            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
            multibuffer.push_excerpts(
                toml_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
                cx,
            );
            multibuffer.push_excerpts(
                rust_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(1, 0))],
                cx,
            );
            multibuffer
        });

        let editor = workspace.update_in(cx, |workspace, window, cx| {
            let editor = cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx));
            workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
            editor
        });
        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        let _toml_server = toml_server.next().await.unwrap();
        let _rust_server = rust_server.next().await.unwrap();

        // Initial request.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);
        cx.run_until_parked();

        // Initially, excerpt only covers line 0, so only the 'a' token should be highlighted.
        // The excerpt content is "a = 1\n" (6 chars), so 'a' is at offset 0.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
        );

        // Get the excerpt id for the TOML excerpt and expand it down by 2 lines.
        let toml_excerpt_id =
            editor.read_with(cx, |editor, cx| editor.buffer().read(cx).excerpt_ids()[0]);
        editor.update_in(cx, |editor, _, cx| {
            editor.buffer().update(cx, |buffer, cx| {
                buffer.expand_excerpts([toml_excerpt_id], 2, ExpandExcerptDirection::Down, cx);
            });
        });

        // Wait for semantic tokens to be re-fetched after expansion.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;

        // After expansion, the excerpt covers lines 0-2, so 'a', 'b', 'c' should all be highlighted.
        // Content is now "a = 1\nb = 2\nc = 3\n" (18 chars).
        // 'a' at offset 0, 'b' at offset 6, 'c' at offset 12.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![
                MultiBufferOffset(0)..MultiBufferOffset(1),
                MultiBufferOffset(6)..MultiBufferOffset(7),
                MultiBufferOffset(12)..MultiBufferOffset(13),
            ]
        );
    }
1236
    // Verifies that when a multibuffer contains two excerpts of the SAME
    // underlying buffer, debouncing collapses the edits into a single
    // semantic-token refetch (one initial request plus one after the edits).
    #[gpui::test]
    async fn lsp_semantic_tokens_multibuffer_shared(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic-token fetching for TOML buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "TOML".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        let toml_language = Arc::new(Language::new(
            LanguageConfig {
                name: "TOML".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["toml".into()],
                    ..LanguageMatcher::default()
                },
                ..LanguageConfig::default()
            },
            None,
        ));

        // Single-entry legend: token_type 0 is "property".
        let toml_legend = lsp::SemanticTokensLegend {
            token_types: vec!["property".into()],
            token_modifiers: Vec::new(),
        };

        let app_state = cx.update(workspace::AppState::test);

        cx.update(|cx| {
            assets::Assets.load_test_fonts(cx);
            crate::init(cx);
            workspace::init(app_state.clone(), cx);
        });

        let project = Project::test(app_state.fs.clone(), [], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        // Counts SemanticTokensFullRequest calls — the central assertion below.
        let full_counter_toml = Arc::new(AtomicUsize::new(0));
        let full_counter_toml_clone = full_counter_toml.clone();

        let mut toml_server = language_registry.register_fake_lsp(
            toml_language.name(),
            FakeLspAdapter {
                name: "toml",
                capabilities: lsp::ServerCapabilities {
                    semantic_tokens_provider: Some(
                        lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                            lsp::SemanticTokensOptions {
                                legend: toml_legend,
                                full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                                ..lsp::SemanticTokensOptions::default()
                            },
                        ),
                    ),
                    ..lsp::ServerCapabilities::default()
                },
                initializer: Some(Box::new({
                    let full_counter_toml_clone = full_counter_toml_clone.clone();
                    move |fake_server| {
                        let full_counter = full_counter_toml_clone.clone();
                        fake_server
                            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                                move |_, _| {
                                    full_counter.fetch_add(1, atomic::Ordering::Release);
                                    async move {
                                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                                            lsp::SemanticTokens {
                                                // highlight 'a' as a property
                                                data: vec![
                                                    0, // delta_line
                                                    0, // delta_start
                                                    1, // length
                                                    0, // token_type
                                                    0, // token_modifiers_bitset
                                                ],
                                                result_id: Some("a".into()),
                                            },
                                        )))
                                    }
                                },
                            );
                    }
                })),
                ..FakeLspAdapter::default()
            },
        );
        language_registry.add(toml_language.clone());

        app_state
            .fs
            .as_fake()
            .insert_tree(
                EditorLspTestContext::root_path(),
                json!({
                    ".git": {},
                    "dir": {
                        "foo.toml": "a = 1\nb = 2\n",
                    }
                }),
            )
            .await;

        let (multi_workspace, cx) =
            cx.add_window_view(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
        let workspace = multi_workspace.read_with(cx, |mw, _| mw.workspace().clone());
        project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(EditorLspTestContext::root_path(), true, cx)
            })
            .await
            .unwrap();
        cx.read(|cx| workspace.read(cx).worktree_scans_complete(cx))
            .await;

        let toml_file = cx.read(|cx| workspace.file_project_paths(cx)[0].clone());
        let toml_item = workspace
            .update_in(cx, |workspace, window, cx| {
                workspace.open_path(toml_file, None, true, window, cx)
            })
            .await
            .expect("Could not open test file");

        let toml_editor = cx.update(|_, cx| {
            toml_item
                .act_as::<Editor>(cx)
                .expect("Opened test file wasn't an editor")
        });
        let toml_buffer = cx.read(|cx| {
            toml_editor
                .read(cx)
                .buffer()
                .read(cx)
                .as_singleton()
                .unwrap()
        });
        // Two excerpts over the same lines of the same buffer.
        let multibuffer = cx.new(|cx| {
            let mut multibuffer = MultiBuffer::new(Capability::ReadWrite);
            multibuffer.push_excerpts(
                toml_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
                cx,
            );
            multibuffer.push_excerpts(
                toml_buffer.clone(),
                [ExcerptRange::new(Point::new(0, 0)..Point::new(2, 0))],
                cx,
            );
            multibuffer
        });

        let editor = workspace.update_in(cx, |_, window, cx| {
            cx.new(|cx| build_editor_with_project(project, multibuffer, window, cx))
        });
        editor.update_in(cx, |editor, window, cx| {
            let nav_history = workspace
                .read(cx)
                .active_pane()
                .read(cx)
                .nav_history_for_item(&cx.entity());
            editor.set_nav_history(Some(nav_history));
            window.focus(&editor.focus_handle(cx), cx)
        });

        let _toml_server = toml_server.next().await.unwrap();

        // Initial request.
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 1);

        // Edit two parts of the multibuffer, which both map to the same buffer.
        //
        // Without debouncing, this grabs semantic tokens 4 times (twice for the
        // toml editor, and twice for the multibuffer).
        editor.update_in(cx, |editor, _, cx| {
            editor.edit([(MultiBufferOffset(0)..MultiBufferOffset(1), "b")], cx);
            editor.edit([(MultiBufferOffset(12)..MultiBufferOffset(13), "c")], cx);
        });
        cx.executor().advance_clock(Duration::from_millis(200));
        let task = editor.update_in(cx, |e, _, _| e.semantic_token_state.take_update_task());
        cx.run_until_parked();
        task.await;
        // NOTE(review): only a single highlight is expected here even though
        // the buffer appears in two excerpts — confirm this reflects intended
        // deduplication of highlights for shared excerpts.
        assert_eq!(
            extract_semantic_highlights(&editor, &cx),
            vec![MultiBufferOffset(0)..MultiBufferOffset(1)]
        );

        // One initial fetch + one debounced fetch for both edits combined.
        assert_eq!(full_counter_toml.load(atomic::Ordering::Acquire), 2);
    }
1432
1433    fn extract_semantic_highlights(
1434        editor: &Entity<Editor>,
1435        cx: &TestAppContext,
1436    ) -> Vec<Range<MultiBufferOffset>> {
1437        editor.read_with(cx, |editor, cx| {
1438            let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
1439            editor
1440                .display_map
1441                .read(cx)
1442                .semantic_token_highlights
1443                .iter()
1444                .flat_map(|(_, (v, _))| v.iter())
1445                .map(|highlights| highlights.range.to_offset(&multi_buffer_snapshot))
1446                .collect()
1447        })
1448    }
1449
    // Verifies that changing `global_lsp_settings.semantic_token_rules` in user
    // settings restyles semantic tokens: after a custom foreground color is
    // configured for "function" tokens and a refetch is triggered, the existing
    // highlight picks up the new color.
    #[gpui::test]
    async fn test_semantic_tokens_rules_changes_restyle_tokens(cx: &mut TestAppContext) {
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use settings::{GlobalLspSettingsContent, SemanticTokenRule};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake language server advertising a single "function" token type with
        // delta-capable full semantic tokens support.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // The server always reports one 4-char "function" token at line 0, col 3
        // (LSP relative encoding: delta_line, delta_start, length, type, modifiers).
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| {
                    async move {
                        Ok(Some(lsp::SemanticTokensResult::Tokens(
                            lsp::SemanticTokens {
                                data: vec![
                                    0, // delta_line
                                    3, // delta_start
                                    4, // length
                                    0, // token_type (function)
                                    0, // token_modifiers_bitset
                                ],
                                result_id: None,
                            },
                        )))
                    }
                },
            );

        // Trigger initial semantic tokens fetch
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        // Verify initial highlights exist (with no custom color yet); the token
        // covers "main" at offsets 3..7.
        let initial_ranges = extract_semantic_highlights(&cx.editor, &cx);
        assert_eq!(
            initial_ranges,
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Should have initial semantic token highlights"
        );
        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        // Initial color should be None or theme default (not the custom red below)
        let initial_color = initial_styles[0].color;

        // Set a custom foreground color for function tokens via settings.json
        let red_color = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        };
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.global_lsp_settings = Some(GlobalLspSettingsContent {
                        semantic_token_rules: Some(SemanticTokenRules {
                            rules: Vec::from([SemanticTokenRule {
                                token_type: Some("function".to_string()),
                                foreground_color: Some(red_color),
                                ..SemanticTokenRule::default()
                            }]),
                        }),
                        ..GlobalLspSettingsContent::default()
                    });
                });
            });
        });

        // Trigger a refetch by making an edit (which forces semantic tokens update)
        cx.set_state("ˇfn main() { }");
        full_request.next().await;
        cx.run_until_parked();

        // Verify the highlights now have the custom red color
        let styles_after_settings_change = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(
            styles_after_settings_change.len(),
            1,
            "Should still have one highlight"
        );
        assert_eq!(
            styles_after_settings_change[0].color,
            Some(Hsla::from(red_color)),
            "Highlight should have the custom red color from settings.json"
        );
        assert_ne!(
            styles_after_settings_change[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1570
    // Verifies that `theme.experimental_theme_overrides` restyles semantic tokens
    // without a refetch: setting, changing, and clearing a syntax override for
    // "function" each update the highlight's color in place.
    #[gpui::test]
    async fn test_theme_override_changes_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake language server advertising a single "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // The server always reports one 4-char "function" token at line 0, col 3.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and capture the default style for comparison.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Changing experimental_theme_overrides triggers GlobalTheme reload,
        // which fires theme_changed → refresh_semantic_token_highlights.
        let red_color: Hsla = Rgba {
            r: 1.0,
            g: 0.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#ff0000".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        // Let any debounced refresh run before inspecting styles.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(red_color),
            "Highlight should have red color from theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );

        // Changing the override to a different color also restyles.
        let blue_color: Hsla = Rgba {
            r: 0.0,
            g: 0.0,
            b: 1.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = Some(ThemeStyleContent {
                        syntax: IndexMap::from_iter([(
                            "function".to_string(),
                            HighlightStyleContent {
                                color: Some("#0000ff".to_string()),
                                background_color: None,
                                font_style: None,
                                font_weight: None,
                            },
                        )]),
                        ..ThemeStyleContent::default()
                    });
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_second_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_second_override.len(), 1);
        assert_eq!(
            styles_after_second_override[0].color,
            Some(blue_color),
            "Highlight should have blue color from updated theme override"
        );

        // Removing overrides reverts to the original theme color.
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.experimental_theme_overrides = None;
                });
            });
        });

        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_clear = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_clear.len(), 1);
        assert_eq!(
            styles_after_clear[0].color, initial_color,
            "Highlight should revert to initial color after clearing overrides"
        );
    }
1735
    // Verifies that per-theme overrides (`theme.theme_overrides`, keyed by the
    // active theme's name) restyle semantic tokens, via the same GlobalTheme
    // reload path as the experimental overrides.
    #[gpui::test]
    async fn test_per_theme_overrides_restyle_semantic_tokens(cx: &mut TestAppContext) {
        use collections::IndexMap;
        use gpui::{Hsla, Rgba, UpdateGlobal as _};
        use theme::{HighlightStyleContent, ThemeStyleContent};
        use ui::ActiveTheme as _;

        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake language server advertising a single "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: Vec::from(["function".into()]),
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // The server always reports one 4-char "function" token at line 0, col 3.
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type (function)
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and capture the default style for comparison.
        cx.set_state("ˇfn main() {}");
        full_request.next().await;
        cx.run_until_parked();

        let initial_styles = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(initial_styles.len(), 1, "Should have one highlight style");
        let initial_color = initial_styles[0].color;

        // Per-theme overrides (theme_overrides keyed by theme name) also go through
        // GlobalTheme reload → theme_changed → refresh_semantic_token_highlights.
        let theme_name = cx.update(|_, cx| cx.theme().name.to_string());
        let green_color: Hsla = Rgba {
            r: 0.0,
            g: 1.0,
            b: 0.0,
            a: 1.0,
        }
        .into();
        cx.update(|_, cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.theme.theme_overrides = collections::HashMap::from_iter([(
                        theme_name.clone(),
                        ThemeStyleContent {
                            syntax: IndexMap::from_iter([(
                                "function".to_string(),
                                HighlightStyleContent {
                                    color: Some("#00ff00".to_string()),
                                    background_color: None,
                                    font_style: None,
                                    font_weight: None,
                                },
                            )]),
                            ..ThemeStyleContent::default()
                        },
                    )]);
                });
            });
        });

        // Let any debounced refresh run before inspecting styles.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        let styles_after_override = extract_semantic_highlight_styles(&cx.editor, &cx);
        assert_eq!(styles_after_override.len(), 1);
        assert_eq!(
            styles_after_override[0].color,
            Some(green_color),
            "Highlight should have green color from per-theme override"
        );
        assert_ne!(
            styles_after_override[0].color, initial_color,
            "Color should have changed from initial"
        );
    }
1848
    // Verifies that stopping the language server for a buffer removes its
    // semantic-token highlights from the editor.
    #[gpui::test]
    async fn test_stopping_language_server_clears_semantic_tokens(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake language server advertising a single "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // The server always reports one 4-char token at line 0, col 3 ("main").
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and confirm the highlight is present.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before stopping the server"
        );

        // Stop all language servers attached to the editor's buffers.
        cx.update_editor(|editor, _, cx| {
            let buffers = editor.buffer.read(cx).all_buffers().into_iter().collect();
            editor.project.as_ref().unwrap().update(cx, |project, cx| {
                project.stop_language_servers_for_buffers(buffers, HashSet::default(), cx);
            })
        });
        // Let any debounced cleanup run before inspecting highlights.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after stopping the server"
        );
    }
1926
    // Verifies that flipping the per-language `semantic_tokens` setting from
    // Full to Off removes existing semantic-token highlights without requiring
    // a server round-trip.
    #[gpui::test]
    async fn test_disabling_semantic_tokens_setting_clears_highlights(cx: &mut TestAppContext) {
        init_test(cx, |_| {});

        // Enable full semantic tokens for Rust buffers.
        update_test_language_settings(cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Full),
                    ..LanguageSettingsContent::default()
                },
            );
        });

        // Fake language server advertising a single "function" token type.
        let mut cx = EditorLspTestContext::new_rust(
            lsp::ServerCapabilities {
                semantic_tokens_provider: Some(
                    lsp::SemanticTokensServerCapabilities::SemanticTokensOptions(
                        lsp::SemanticTokensOptions {
                            legend: lsp::SemanticTokensLegend {
                                token_types: vec!["function".into()],
                                token_modifiers: Vec::new(),
                            },
                            full: Some(lsp::SemanticTokensFullOptions::Delta { delta: None }),
                            ..lsp::SemanticTokensOptions::default()
                        },
                    ),
                ),
                ..lsp::ServerCapabilities::default()
            },
            cx,
        )
        .await;

        // The server always reports one 4-char token at line 0, col 3 ("main").
        let mut full_request = cx
            .set_request_handler::<lsp::request::SemanticTokensFullRequest, _, _>(
                move |_, _, _| async move {
                    Ok(Some(lsp::SemanticTokensResult::Tokens(
                        lsp::SemanticTokens {
                            data: vec![
                                0, // delta_line
                                3, // delta_start
                                4, // length
                                0, // token_type
                                0, // token_modifiers_bitset
                            ],
                            result_id: None,
                        },
                    )))
                },
            );

        // Trigger the initial fetch and confirm the highlight is present.
        cx.set_state("ˇfn main() {}");
        assert!(full_request.next().await.is_some());
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            vec![MultiBufferOffset(3)..MultiBufferOffset(7)],
            "Semantic tokens should be present before disabling the setting"
        );

        // Turn semantic tokens off for Rust; the settings-changed path should
        // clear the highlights.
        update_test_language_settings(&mut cx, |language_settings| {
            language_settings.languages.0.insert(
                "Rust".into(),
                LanguageSettingsContent {
                    semantic_tokens: Some(SemanticTokens::Off),
                    ..LanguageSettingsContent::default()
                },
            );
        });
        // Let any debounced cleanup run before inspecting highlights.
        cx.executor().advance_clock(Duration::from_millis(200));
        cx.run_until_parked();

        assert_eq!(
            extract_semantic_highlights(&cx.editor, &cx),
            Vec::new(),
            "Semantic tokens should be cleared after disabling the setting"
        );
    }
2007
2008    fn extract_semantic_highlight_styles(
2009        editor: &Entity<Editor>,
2010        cx: &TestAppContext,
2011    ) -> Vec<HighlightStyle> {
2012        editor.read_with(cx, |editor, cx| {
2013            editor
2014                .display_map
2015                .read(cx)
2016                .semantic_token_highlights
2017                .iter()
2018                .flat_map(|(_, (v, interner))| {
2019                    v.iter().map(|highlights| interner[highlights.style])
2020                })
2021                .collect()
2022        })
2023    }
2024}