syntax_index.rs

use anyhow::{Result, anyhow};
use collections::{HashMap, HashSet};
use futures::channel::mpsc;
use futures::lock::Mutex;
use futures::{FutureExt as _, StreamExt, future};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
use itertools::Itertools;
use language::{Buffer, BufferEvent};
use postage::stream::Stream as _;
use project::buffer_store::{BufferStore, BufferStoreEvent};
use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use project::{PathChange, Project, ProjectEntryId, ProjectPath};
use slotmap::SlotMap;
use std::iter;
use std::ops::{DerefMut, Range};
use std::sync::Arc;
use text::BufferId;
use util::{RangeExt as _, debug_panic, some_or_debug_panic};

use crate::declaration::{
    BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
};
use crate::outline::declarations_in_buffer;

// TODO
//
// * Also queue / debounce buffer changes. A challenge for this is that use of
// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
//
// * Add a per-language configuration for skipping indexing.

// Potential future improvements:
//
// * Prevent indexing of a large file from blocking the queue.
//
// * Send multiple selected excerpt ranges. The challenge is that excerpt ranges influence which
// references are present and their scores.

// Potential future optimizations:
//
// * Index files on multiple threads in Zed (currently only parallel for the CLI). Adding some kind
// of priority system to the background executor could help - it's single threaded for now to avoid
// interfering with other work.
//
// * Parse files directly instead of loading into a Rope.
//
//   - This would allow the task handling dirty_files to be done entirely on the background executor.
//
//   - Make SyntaxMap generic to handle embedded languages? Line boundaries will also need to be
//   found, but that can be done by scanning characters in the flat representation.
//
// * Use something similar to slotmap without key versions.
//
// * Concurrent slotmap

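/// Index of declarations extracted from worktree files and open buffers, used to resolve
/// identifiers to outline items without re-parsing. Worktree files are indexed asynchronously
/// through a dirty-file queue, while open buffers are re-indexed whenever they are edited.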
pub struct SyntaxIndex {
    state: Arc<Mutex<SyntaxIndexState>>,
    project: WeakEntity<Project>,
    initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
}

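/// Index state shared behind an async mutex: declarations keyed by [`DeclarationId`], an
/// identifier-to-declarations lookup table, and per-file / per-buffer declaration lists.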
pub struct SyntaxIndexState {
    declarations: SlotMap<DeclarationId, Declaration>,
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    files: HashMap<ProjectEntryId, FileState>,
    buffers: HashMap<BufferId, BufferState>,
    dirty_files: HashMap<ProjectEntryId, ProjectPath>,
    dirty_files_tx: mpsc::Sender<()>,
    _file_indexing_task: Option<Task<()>>,
}

#[derive(Debug, Default)]
struct FileState {
    declarations: Vec<DeclarationId>,
}

#[derive(Default)]
struct BufferState {
    declarations: Vec<DeclarationId>,
    task: Option<Task<()>>,
}

impl SyntaxIndex {
    pub fn new(
        project: &Entity<Project>,
        file_indexing_parallelism: usize,
        cx: &mut Context<Self>,
    ) -> Self {
        assert!(file_indexing_parallelism > 0);
        let (dirty_files_tx, mut dirty_files_rx) = mpsc::channel::<()>(1);
        let (mut initial_file_indexing_done_tx, initial_file_indexing_done_rx) =
            postage::watch::channel();

        let initial_state = SyntaxIndexState {
            declarations: SlotMap::default(),
            identifiers: HashMap::default(),
            files: HashMap::default(),
            buffers: HashMap::default(),
            dirty_files: HashMap::default(),
            dirty_files_tx,
            _file_indexing_task: None,
        };
        let this = Self {
            project: project.downgrade(),
            state: Arc::new(Mutex::new(initial_state)),
            initial_file_indexing_done_rx,
        };

        let worktree_store = project.read(cx).worktree_store();
        let initial_worktree_snapshots = worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>();
        this.state.try_lock().unwrap()._file_indexing_task =
            Some(cx.spawn(async move |this, cx| {
                let snapshots_file_count = initial_worktree_snapshots
                    .iter()
                    .map(|worktree| worktree.file_count())
                    .sum::<usize>();
                // Guard against a zero chunk size (and division by zero below) when there are no files.
                let chunk_size = snapshots_file_count
                    .div_ceil(file_indexing_parallelism)
                    .max(1);
                let chunk_count = snapshots_file_count.div_ceil(chunk_size);
                let file_chunks = initial_worktree_snapshots
                    .iter()
                    .flat_map(|worktree| {
                        let worktree_id = worktree.id();
                        worktree.files(false, 0).map(move |entry| {
                            (
                                entry.id,
                                ProjectPath {
                                    worktree_id,
                                    path: entry.path.clone(),
                                },
                            )
                        })
                    })
                    .chunks(chunk_size);

                let mut tasks = Vec::with_capacity(chunk_count);
                for chunk in file_chunks.into_iter() {
                    tasks.push(Self::update_dirty_files(
                        &this,
                        chunk.into_iter().collect(),
                        cx.clone(),
                    ));
                }
                futures::future::join_all(tasks).await;

                log::info!("Finished initial file indexing");
                *initial_file_indexing_done_tx.borrow_mut() = true;

                let Ok(state) = this.read_with(cx, |this, _cx| this.state.clone()) else {
                    return;
                };
                while dirty_files_rx.next().await.is_some() {
                    let mut state = state.lock().await;
                    let was_underused = state.dirty_files.capacity() > 255
                        && state.dirty_files.len() * 8 < state.dirty_files.capacity();
                    let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
                    if was_underused {
                        state.dirty_files.shrink_to_fit();
                    }
                    drop(state);
                    if dirty_files.is_empty() {
                        continue;
                    }

                    let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
                    let chunk_count = dirty_files.len().div_ceil(chunk_size);
                    let mut tasks = Vec::with_capacity(chunk_count);
                    let chunks = dirty_files.into_iter().chunks(chunk_size);
                    for chunk in chunks.into_iter() {
                        tasks.push(Self::update_dirty_files(
                            &this,
                            chunk.into_iter().collect(),
                            cx.clone(),
                        ));
                    }
                    futures::future::join_all(tasks).await;
                }
            }));

        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }

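    /// Sequentially re-indexes a batch of files that were reported as changed.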
    async fn update_dirty_files(
        this: &WeakEntity<Self>,
        dirty_files: Vec<(ProjectEntryId, ProjectPath)>,
        mut cx: AsyncApp,
    ) {
        for (entry_id, project_path) in dirty_files {
            let Ok(task) = this.update(&mut cx, |this, cx| {
                this.update_file(entry_id, project_path, cx)
            }) else {
                return;
            };
            task.await;
        }
    }

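    /// Resolves once the initial scan of all worktree files has finished indexing.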
    pub fn wait_for_initial_file_indexing(&self, cx: &App) -> Task<Result<()>> {
        if *self.initial_file_indexing_done_rx.borrow() {
            Task::ready(Ok(()))
        } else {
            let mut rx = self.initial_file_indexing_done_rx.clone();
            cx.background_spawn(async move {
                loop {
                    match rx.recv().await {
                        Some(true) => return Ok(()),
                        Some(false) => {}
                        None => {
                            return Err(anyhow!(
                                "SyntaxIndex dropped while waiting for initial file indexing"
                            ));
                        }
                    }
                }
            })
        }
    }

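    /// Returns the project paths of all files currently present in the index.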
    pub fn indexed_file_paths(&self, cx: &App) -> Task<Vec<ProjectPath>> {
        let state = self.state.clone();
        let project = self.project.clone();

        cx.spawn(async move |cx| {
            let state = state.lock().await;
            let Some(project) = project.upgrade() else {
                return vec![];
            };
            project
                .read_with(cx, |project, cx| {
                    state
                        .files
                        .keys()
                        .filter_map(|entry_id| project.path_for_entry(*entry_id, cx))
                        .collect()
                })
                .unwrap_or_default()
        })
    }

    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                let state = Arc::downgrade(&self.state);
                let worktree_id = *worktree_id;
                let updated_entries_set = updated_entries_set.clone();
                cx.background_spawn(async move {
                    let Some(state) = state.upgrade() else { return };
                    let mut state = state.lock().await;
                    for (path, entry_id, path_change) in updated_entries_set.iter() {
                        if let PathChange::Removed = path_change {
                            state.files.remove(entry_id);
                            state.dirty_files.remove(entry_id);
                        } else {
                            let project_path = ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            };
                            state.dirty_files.insert(*entry_id, project_path);
                        }
                    }
                    match state.dirty_files_tx.try_send(()) {
                        Err(err) if err.is_disconnected() => {
                            log::error!("bug: syntax indexing queue is disconnected");
                        }
                        _ => {}
                    }
                })
                .detach();
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                let project_entry_id = *project_entry_id;
                self.with_state(cx, move |state| {
                    state.files.remove(&project_entry_id);
                })
            }
            _ => {}
        }
    }

    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

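    /// Shared handle to the index state; lock it asynchronously to query declarations.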
    pub fn state(&self) -> &Arc<Mutex<SyntaxIndexState>> {
        &self.state
    }

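    /// Runs `f` against the state, synchronously when the lock is free, otherwise on a
    /// background task once the lock becomes available.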
    fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) {
        if let Some(mut state) = self.state.try_lock() {
            f(&mut state);
            return;
        }
        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            f(&mut state)
        })
        .detach();
    }

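    /// Starts tracking an open buffer: indexes it now, re-indexes it on edits, and drops its
    /// declarations when the buffer is released.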
    fn register_buffer(&self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer_id = buffer.read(cx).remote_id();
        cx.observe_release(buffer, move |this, _buffer, cx| {
            this.with_state(cx, move |state| {
                if let Some(buffer_state) = state.buffers.remove(&buffer_id) {
                    SyntaxIndexState::remove_buffer_declarations(
                        &buffer_state.declarations,
                        &mut state.declarations,
                        &mut state.identifiers,
                    );
                }
            })
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();

        self.update_buffer(buffer.clone(), cx);
    }

    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited => self.update_buffer(buffer, cx),
            _ => {}
        }
    }

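    /// Re-indexes declarations for an open buffer once its latest parse completes.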
    fn update_buffer(&self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer = buffer_entity.read(cx);
        if buffer.language().is_none() {
            return;
        }

        let Some(project_entry_id) =
            project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
        else {
            return;
        };
        let buffer_id = buffer.remote_id();

        let mut parse_status = buffer.parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer_entity.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let state = Arc::downgrade(&self.state);
        let task = cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok(snapshot) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.text.as_rope();

            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| {
                    (
                        item.parent_index,
                        BufferDeclaration::from_outline(item, &rope),
                    )
                })
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let buffer_state = state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default);

            SyntaxIndexState::remove_buffer_declarations(
                &buffer_state.declarations,
                &mut state.declarations,
                &mut state.identifiers,
            );

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::Buffer {
                    rope: rope.clone(),
                    buffer_id,
                    declaration,
                    project_entry_id,
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }

            buffer_state.declarations = new_ids;
        });

        self.with_state(cx, move |state| {
            state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default)
                .task = Some(task)
        });
    }

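    /// Loads a worktree file into a temporary buffer, parses it, and replaces the file's
    /// declarations in the index.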
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        let Some(project) = self.project.upgrade() else {
            return Task::ready(());
        };
        let project = project.read(cx);

        let language_registry = project.languages();
        let Some(available_language) =
            language_registry.language_for_file_path(project_path.path.as_std_path())
        else {
            return Task::ready(());
        };
        let language = if let Some(Ok(Ok(language))) = language_registry
            .load_language(&available_language)
            .now_or_never()
        {
            if language
                .grammar()
                .is_none_or(|grammar| grammar.outline_config.is_none())
            {
                return Task::ready(());
            }
            future::Either::Left(async { Ok(language) })
        } else {
            let language_registry = language_registry.clone();
            future::Either::Right(async move {
                anyhow::Ok(
                    language_registry
                        .load_language(&available_language)
                        .await??,
                )
            })
        };

        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(());
        };

        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language.await?;

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(Some(language), cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            })
        });

        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok(snapshot) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.as_rope();
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let file_state = state.files.entry(entry_id).or_insert_with(Default::default);
            for old_declaration_id in &file_state.declarations {
                let Some(declaration) = state.declarations.remove(*old_declaration_id) else {
                    debug_panic!("declaration not found");
                    continue;
                };
                if let Some(identifier_declarations) =
                    state.identifiers.get_mut(declaration.identifier())
                {
                    identifier_declarations.remove(old_declaration_id);
                }
            }

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::File {
                    project_entry_id: entry_id,
                    declaration,
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }
            file_state.declarations = new_ids;
        })
    }
}

impl SyntaxIndexState {
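    /// Looks up a single declaration by id.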
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }

    /// Returns declarations for the identifier. Returns an empty vector if the number of results
    /// reaches `N`.
    ///
    /// TODO: Consider doing some pre-ranking and instead truncating when N is exceeded.
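    ///
    /// Illustrative usage (not a doctest), assuming `index_state` is a locked
    /// [`SyntaxIndexState`] and `rust_lang_id` is a previously obtained `LanguageId`:
    ///
    /// ```ignore
    /// let main = Identifier {
    ///     name: "main".into(),
    ///     language_id: rust_lang_id,
    /// };
    /// // With N = 8, at most 7 declarations are returned; finding 8 or more yields an empty vector.
    /// let decls = index_state.declarations_for_identifier::<8>(&main);
    /// assert!(decls.len() < 8);
    /// ```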
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
    ) -> Vec<(DeclarationId, &Declaration)> {
        // Keep N small, since `included_buffer_entry_ids` below is a stack-allocated ArrayVec of
        // capacity N.
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer {
                    project_entry_id, ..
                } => {
                    included_buffer_entry_ids.push(*project_entry_id);
                    result.push((*declaration_id, declaration));
                    if result.len() == N {
                        return Vec::new();
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        file_declarations.push((*declaration_id, declaration));
                    }
                }
            }
        }

        for (declaration_id, declaration) in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        result.push((declaration_id, declaration));

                        if result.len() == N {
                            return Vec::new();
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }

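    /// Iterates declarations in the given buffer whose item range contains `range`.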
    pub fn buffer_declarations_containing_range(
        &self,
        buffer_id: BufferId,
        range: Range<usize>,
    ) -> impl Iterator<Item = (DeclarationId, &BufferDeclaration)> {
        let Some(buffer_state) = self.buffers.get(&buffer_id) else {
            return itertools::Either::Left(iter::empty());
        };

        let iter = buffer_state
            .declarations
            .iter()
            .filter_map(move |declaration_id| {
                let Some(declaration) = self
                    .declarations
                    .get(*declaration_id)
                    .and_then(|d| d.as_buffer())
                else {
                    log::error!("bug: missing buffer outline declaration");
                    return None;
                };
                if declaration.item_range.contains_inclusive(&range) {
                    return Some((*declaration_id, declaration));
                }
                None
            });
        itertools::Either::Right(iter)
    }

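    /// Number of declarations indexed for the file or buffer that `declaration` belongs to.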
    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
        match declaration {
            Declaration::File {
                project_entry_id, ..
            } => self
                .files
                .get(project_entry_id)
                .map(|file_state| file_state.declarations.len())
                .unwrap_or_default(),
            Declaration::Buffer { buffer_id, .. } => self
                .buffers
                .get(buffer_id)
                .map(|buffer_state| buffer_state.declarations.len())
                .unwrap_or_default(),
        }
    }

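    /// Removes a buffer's old declarations from both the declaration map and the identifier table.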
    fn remove_buffer_declarations(
        old_declaration_ids: &[DeclarationId],
        declarations: &mut SlotMap<DeclarationId, Declaration>,
        identifiers: &mut HashMap<Identifier, HashSet<DeclarationId>>,
    ) {
        for old_declaration_id in old_declaration_ids {
            let Some(declaration) = declarations.remove(*old_declaration_id) else {
                debug_panic!("declaration not found");
                continue;
            };
            if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) {
                identifier_declarations.remove(old_declaration_id);
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::Arc;

    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project};
    use serde_json::json;
    use settings::SettingsStore;
    use text::OffsetRangeExt as _;
    use util::{path, rel_path::rel_path};

    use crate::syntax_index::SyntaxIndex;

    #[gpui::test]
    async fn test_unopened_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("a.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range, 0..98);

            let decl = expect_file_decl("c.rs", &decls[1].1, &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range, 32..280);
        });
    }

    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    #[gpui::test]
    async fn test_declarations_limit(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        let decls = index_state.declarations_for_identifier::<1>(&Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        });
        assert_eq!(decls.len(), 0);
    }

    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone());
        {
            let index_state = index_state_arc.lock().await;

            cx.update(|cx| {
                let decls = index_state.declarations_for_identifier::<8>(&main);
                assert_eq!(decls.len(), 2);
                let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
                assert_eq!(decl.identifier, main);
                assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..280);

                expect_file_decl("a.rs", &decls[1].1, &project, cx);
            });
        }

        // Drop the buffer and wait for release
        cx.update(|_| {
            drop(buffer);
        });
        cx.run_until_parked();

        let index_state = index_state_arc.lock().await;

        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);
            expect_file_decl("a.rs", &decls[0].1, &project, cx);
            expect_file_decl("c.rs", &decls[1].1, &project, cx);
        });
    }

    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            project_entry_id,
            ..
        } = declaration
        {
            let project_path = project
                .read(cx)
                .path_for_entry(*project_entry_id, cx)
                .unwrap();
            assert_eq!(project_path.path.as_ref(), rel_path(path));
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                rel_path(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<SyntaxIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let file_indexing_parallelism = 2;
        let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx));
        cx.run_until_parked();

        (project, index, lang_id)
    }

    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}