syntax_index.rs

use anyhow::{Result, anyhow};
use collections::{HashMap, HashSet};
use futures::channel::mpsc;
use futures::lock::Mutex;
use futures::{FutureExt as _, StreamExt, future};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
use itertools::Itertools;
use language::{Buffer, BufferEvent};
use postage::stream::Stream as _;
use project::buffer_store::{BufferStore, BufferStoreEvent};
use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use project::{PathChange, Project, ProjectEntryId, ProjectPath};
use slotmap::SlotMap;
use std::iter;
use std::ops::{DerefMut, Range};
use std::sync::Arc;
use text::BufferId;
use util::{RangeExt as _, debug_panic, some_or_debug_panic};

use crate::declaration::{
    BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
};
use crate::outline::declarations_in_buffer;

// TODO
//
// * Also queue / debounce buffer changes. A challenge for this is that use of
// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
//
// * Add a per-language configuration for skipping indexing.

// Potential future improvements:
//
// * Prevent indexing of a large file from blocking the queue.
//
// * Send multiple selected excerpt ranges. The challenge is that excerpt ranges influence which
// references are present and their scores.

// Potential future optimizations:
//
// * Index files on multiple threads in Zed (currently only parallel for the CLI). Adding some kind
// of priority system to the background executor could help - it's single-threaded for now to avoid
// interfering with other work.
//
// * Parse files directly instead of loading into a Rope.
//
//   - This would allow the task handling dirty_files to be done entirely on the background executor.
//
//   - Make SyntaxMap generic to handle embedded languages? Will also need to find line boundaries,
//   but that can be done by scanning characters in the flat representation.
//
// * Use something similar to slotmap without key versions.
//
// * Concurrent slotmap.

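/// Indexes declarations (outline items) found in project files and open buffers so they can be
/// looked up by identifier. Open buffers are re-indexed as they change; files are re-indexed as
/// their worktree entries change.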
pub struct SyntaxIndex {
    state: Arc<Mutex<SyntaxIndexState>>,
    project: WeakEntity<Project>,
    initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
}

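/// Shared state of the index: the declarations themselves, a lookup table keyed by identifier,
/// per-file and per-buffer declaration lists, and the queue of files awaiting (re)indexing.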
pub struct SyntaxIndexState {
    declarations: SlotMap<DeclarationId, Declaration>,
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    files: HashMap<ProjectEntryId, FileState>,
    buffers: HashMap<BufferId, BufferState>,
    dirty_files: HashMap<ProjectEntryId, ProjectPath>,
    dirty_files_tx: mpsc::Sender<()>,
    _file_indexing_task: Option<Task<()>>,
}

#[derive(Debug, Default)]
struct FileState {
    declarations: Vec<DeclarationId>,
}

#[derive(Default)]
struct BufferState {
    declarations: Vec<DeclarationId>,
    task: Option<Task<()>>,
}

impl SyntaxIndex {
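    /// Creates the index and kicks off initial indexing of all files in the project's worktrees,
    /// splitting the work into `file_indexing_parallelism` chunks. Also subscribes to worktree and
    /// buffer store events so the index stays up to date as files and buffers change.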
    pub fn new(
        project: &Entity<Project>,
        file_indexing_parallelism: usize,
        cx: &mut Context<Self>,
    ) -> Self {
        assert!(file_indexing_parallelism > 0);
        let (dirty_files_tx, mut dirty_files_rx) = mpsc::channel::<()>(1);
        let (mut initial_file_indexing_done_tx, initial_file_indexing_done_rx) =
            postage::watch::channel();

        let initial_state = SyntaxIndexState {
            declarations: SlotMap::default(),
            identifiers: HashMap::default(),
            files: HashMap::default(),
            buffers: HashMap::default(),
            dirty_files: HashMap::default(),
            dirty_files_tx,
            _file_indexing_task: None,
        };
        let this = Self {
            project: project.downgrade(),
            state: Arc::new(Mutex::new(initial_state)),
            initial_file_indexing_done_rx,
        };

        let worktree_store = project.read(cx).worktree_store();
        let initial_worktree_snapshots = worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>();
        this.state.try_lock().unwrap()._file_indexing_task =
            Some(cx.spawn(async move |this, cx| {
                let snapshots_file_count = initial_worktree_snapshots
                    .iter()
                    .map(|worktree| worktree.file_count())
                    .sum::<usize>();
                // Guard against an empty project: a chunk_size of 0 would panic in the div_ceil
                // and `chunks` calls below.
                let chunk_size = snapshots_file_count
                    .div_ceil(file_indexing_parallelism)
                    .max(1);
                let chunk_count = snapshots_file_count.div_ceil(chunk_size);
                let file_chunks = initial_worktree_snapshots
                    .iter()
                    .flat_map(|worktree| {
                        let worktree_id = worktree.id();
                        worktree.files(false, 0).map(move |entry| {
                            (
                                entry.id,
                                ProjectPath {
                                    worktree_id,
                                    path: entry.path.clone(),
                                },
                            )
                        })
                    })
                    .chunks(chunk_size);

                let mut tasks = Vec::with_capacity(chunk_count);
                for chunk in file_chunks.into_iter() {
                    tasks.push(Self::update_dirty_files(
                        &this,
                        chunk.into_iter().collect(),
                        cx.clone(),
                    ));
                }
                futures::future::join_all(tasks).await;

                log::info!("Finished initial file indexing");
                *initial_file_indexing_done_tx.borrow_mut() = true;

                let Ok(state) = this.read_with(cx, |this, _cx| this.state.clone()) else {
                    return;
                };
                while dirty_files_rx.next().await.is_some() {
                    let mut state = state.lock().await;
                    let was_underused = state.dirty_files.capacity() > 255
                        && state.dirty_files.len() * 8 < state.dirty_files.capacity();
                    let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
                    if was_underused {
                        state.dirty_files.shrink_to_fit();
                    }
                    drop(state);
                    if dirty_files.is_empty() {
                        continue;
                    }

                    let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
                    let chunk_count = dirty_files.len().div_ceil(chunk_size);
                    let mut tasks = Vec::with_capacity(chunk_count);
                    let chunks = dirty_files.into_iter().chunks(chunk_size);
                    for chunk in chunks.into_iter() {
                        tasks.push(Self::update_dirty_files(
                            &this,
                            chunk.into_iter().collect(),
                            cx.clone(),
                        ));
                    }
                    futures::future::join_all(tasks).await;
                }
            }));

        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }

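    /// Re-indexes the given files sequentially. Stops early if the index has been dropped.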
    async fn update_dirty_files(
        this: &WeakEntity<Self>,
        dirty_files: Vec<(ProjectEntryId, ProjectPath)>,
        mut cx: AsyncApp,
    ) {
        for (entry_id, project_path) in dirty_files {
            let Ok(task) = this.update(&mut cx, |this, cx| {
                this.update_file(entry_id, project_path, cx)
            }) else {
                return;
            };
            task.await;
        }
    }

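    /// Returns a task that resolves once the initial walk of the worktree files has been indexed,
    /// or with an error if the index is dropped first.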
    pub fn wait_for_initial_file_indexing(&self, cx: &App) -> Task<Result<()>> {
        if *self.initial_file_indexing_done_rx.borrow() {
            Task::ready(Ok(()))
        } else {
            let mut rx = self.initial_file_indexing_done_rx.clone();
            cx.background_spawn(async move {
                loop {
                    match rx.recv().await {
                        Some(true) => return Ok(()),
                        Some(false) => {}
                        None => {
                            return Err(anyhow!(
                                "SyntaxIndex dropped while waiting for initial file indexing"
                            ));
                        }
                    }
                }
            })
        }
    }

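    /// Marks created and modified worktree entries as dirty so they get re-indexed, and removes
    /// deleted entries from the index.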
    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                let state = Arc::downgrade(&self.state);
                let worktree_id = *worktree_id;
                let updated_entries_set = updated_entries_set.clone();
                cx.background_spawn(async move {
                    let Some(state) = state.upgrade() else { return };
                    let mut state = state.lock().await;
                    for (path, entry_id, path_change) in updated_entries_set.iter() {
                        if let PathChange::Removed = path_change {
                            state.files.remove(entry_id);
                            state.dirty_files.remove(entry_id);
                        } else {
                            let project_path = ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            };
                            state.dirty_files.insert(*entry_id, project_path);
                        }
                    }
                    match state.dirty_files_tx.try_send(()) {
                        Err(err) if err.is_disconnected() => {
                            log::error!("bug: syntax indexing queue is disconnected");
                        }
                        _ => {}
                    }
                })
                .detach();
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                let project_entry_id = *project_entry_id;
                self.with_state(cx, move |state| {
                    state.files.remove(&project_entry_id);
                })
            }
            _ => {}
        }
    }

    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

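    /// Returns the shared index state. Callers must lock the mutex to read or modify it.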
    pub fn state(&self) -> &Arc<Mutex<SyntaxIndexState>> {
        &self.state
    }

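    /// Runs `f` with exclusive access to the index state, synchronously if the lock is free and
    /// otherwise on a background task once the lock can be acquired.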
    fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) {
        if let Some(mut state) = self.state.try_lock() {
            f(&mut state);
            return;
        }
        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            f(&mut state)
        })
        .detach();
    }

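    /// Starts tracking an open buffer: indexes it now, re-indexes it on edits, and removes its
    /// declarations from the index when the buffer is released.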
    fn register_buffer(&self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer_id = buffer.read(cx).remote_id();
        cx.observe_release(buffer, move |this, _buffer, cx| {
            this.with_state(cx, move |state| {
                if let Some(buffer_state) = state.buffers.remove(&buffer_id) {
                    SyntaxIndexState::remove_buffer_declarations(
                        &buffer_state.declarations,
                        &mut state.declarations,
                        &mut state.identifiers,
                    );
                }
            })
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();

        self.update_buffer(buffer.clone(), cx);
    }

    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited => self.update_buffer(buffer, cx),
            _ => {}
        }
    }

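    /// Re-indexes an open buffer: waits for parsing to finish, extracts declarations from its
    /// outline on a background task, and replaces the buffer's previous declarations in the index.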
    fn update_buffer(&self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer = buffer_entity.read(cx);
        if buffer.language().is_none() {
            return;
        }

        let Some(project_entry_id) =
            project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
        else {
            return;
        };
        let buffer_id = buffer.remote_id();

        let mut parse_status = buffer.parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer_entity.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let state = Arc::downgrade(&self.state);
        let task = cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok(snapshot) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.text.as_rope();

            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| {
                    (
                        item.parent_index,
                        BufferDeclaration::from_outline(item, &rope),
                    )
                })
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let buffer_state = state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default);

            SyntaxIndexState::remove_buffer_declarations(
                &buffer_state.declarations,
                &mut state.declarations,
                &mut state.identifiers,
            );

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::Buffer {
                    rope: rope.clone(),
                    buffer_id,
                    declaration,
                    project_entry_id,
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }

            buffer_state.declarations = new_ids;
        });

        self.with_state(cx, move |state| {
            state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default)
                .task = Some(task)
        });
    }

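    /// Re-indexes a file that is not necessarily open: loads it into a temporary buffer with the
    /// language detected from its path, waits for parsing, then replaces the file's declarations
    /// in the index. Returns a no-op task if the language is unknown or has no outline support.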
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        let Some(project) = self.project.upgrade() else {
            return Task::ready(());
        };
        let project = project.read(cx);

        let language_registry = project.languages();
        let Some(available_language) =
            language_registry.language_for_file_path(project_path.path.as_std_path())
        else {
            return Task::ready(());
        };
        let language = if let Some(Ok(Ok(language))) = language_registry
            .load_language(&available_language)
            .now_or_never()
        {
            if language
                .grammar()
                .is_none_or(|grammar| grammar.outline_config.is_none())
            {
                return Task::ready(());
            }
            future::Either::Left(async { Ok(language) })
        } else {
            let language_registry = language_registry.clone();
            future::Either::Right(async move {
                anyhow::Ok(
                    language_registry
                        .load_language(&available_language)
                        .await??,
                )
            })
        };

        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(());
        };

        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language.await?;

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(Some(language), cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            })
        });

        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok(snapshot) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.as_rope();
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let file_state = state.files.entry(entry_id).or_insert_with(Default::default);
            for old_declaration_id in &file_state.declarations {
                let Some(declaration) = state.declarations.remove(*old_declaration_id) else {
                    debug_panic!("declaration not found");
                    continue;
                };
                if let Some(identifier_declarations) =
                    state.identifiers.get_mut(declaration.identifier())
                {
                    identifier_declarations.remove(old_declaration_id);
                }
            }

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::File {
                    project_entry_id: entry_id,
                    declaration,
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }
            file_state.declarations = new_ids;
        })
    }
}

impl SyntaxIndexState {
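    /// Looks up a declaration by id.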
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }

    /// Returns declarations for the identifier. If `N` or more declarations match, returns an
    /// empty vector. Declarations from open buffers take precedence over file declarations for
    /// the same project entry.
    ///
    /// TODO: Consider doing some pre-ranking and instead truncating when N is exceeded.
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
    ) -> Vec<(DeclarationId, &Declaration)> {
        // Keep the ArrayVec below from becoming a large stack allocation.
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer {
                    project_entry_id, ..
                } => {
                    included_buffer_entry_ids.push(*project_entry_id);
                    result.push((*declaration_id, declaration));
                    if result.len() == N {
                        return Vec::new();
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        file_declarations.push((*declaration_id, declaration));
                    }
                }
            }
        }

        for (declaration_id, declaration) in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        result.push((declaration_id, declaration));

                        if result.len() == N {
                            return Vec::new();
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }

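    /// Iterates over the declarations of an open buffer whose item range contains `range`.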
    pub fn buffer_declarations_containing_range(
        &self,
        buffer_id: BufferId,
        range: Range<usize>,
    ) -> impl Iterator<Item = (DeclarationId, &BufferDeclaration)> {
        let Some(buffer_state) = self.buffers.get(&buffer_id) else {
            return itertools::Either::Left(iter::empty());
        };

        let iter = buffer_state
            .declarations
            .iter()
            .filter_map(move |declaration_id| {
                let Some(declaration) = self
                    .declarations
                    .get(*declaration_id)
                    .and_then(|d| d.as_buffer())
                else {
                    log::error!("bug: missing buffer outline declaration");
                    return None;
                };
                if declaration.item_range.contains_inclusive(&range) {
                    Some((*declaration_id, declaration))
                } else {
                    None
                }
            });
        itertools::Either::Right(iter)
    }

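    /// Returns how many declarations the index currently holds for the file or buffer that the
    /// given declaration came from.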
    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
        match declaration {
            Declaration::File {
                project_entry_id, ..
            } => self
                .files
                .get(project_entry_id)
                .map(|file_state| file_state.declarations.len())
                .unwrap_or_default(),
            Declaration::Buffer { buffer_id, .. } => self
                .buffers
                .get(buffer_id)
                .map(|buffer_state| buffer_state.declarations.len())
                .unwrap_or_default(),
        }
    }

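    /// Removes the given declarations from the declarations map and from the per-identifier
    /// lookup sets.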
    fn remove_buffer_declarations(
        old_declaration_ids: &[DeclarationId],
        declarations: &mut SlotMap<DeclarationId, Declaration>,
        identifiers: &mut HashMap<Identifier, HashSet<DeclarationId>>,
    ) {
        for old_declaration_id in old_declaration_ids {
            let Some(declaration) = declarations.remove(*old_declaration_id) else {
                debug_panic!("declaration not found");
                continue;
            };
            if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) {
                identifier_declarations.remove(old_declaration_id);
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::Arc;

    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project};
    use serde_json::json;
    use settings::SettingsStore;
    use text::OffsetRangeExt as _;
    use util::{path, rel_path::rel_path};

    use crate::syntax_index::SyntaxIndex;

    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("a.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range, 0..98);

            let decl = expect_file_decl("c.rs", &decls[1].1, &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range, 32..280);
        });
    }

    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    #[gpui::test]
    async fn test_declarations_limit(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        let decls = index_state.declarations_for_identifier::<1>(&Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        });
        assert_eq!(decls.len(), 0);
    }

    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone());
        {
            let index_state = index_state_arc.lock().await;

            cx.update(|cx| {
                let decls = index_state.declarations_for_identifier::<8>(&main);
                assert_eq!(decls.len(), 2);
                let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
                assert_eq!(decl.identifier, main);
                assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..280);

                expect_file_decl("a.rs", &decls[1].1, &project, cx);
            });
        }

        // Drop the buffer and wait for release
        cx.update(|_| {
            drop(buffer);
        });
        cx.run_until_parked();

        let index_state = index_state_arc.lock().await;

        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);
            expect_file_decl("a.rs", &decls[0].1, &project, cx);
            expect_file_decl("c.rs", &decls[1].1, &project, cx);
        });
    }

    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            project_entry_id,
            ..
        } = declaration
        {
            let project_path = project
                .read(cx)
                .path_for_entry(*project_entry_id, cx)
                .unwrap();
            assert_eq!(project_path.path.as_ref(), rel_path(path));
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                rel_path(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<SyntaxIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let file_indexing_parallelism = 2;
        let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx));
        cx.run_until_parked();

        (project, index, lang_id)
    }

    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}