// syntax_index.rs

   1use anyhow::{Result, anyhow};
   2use collections::{HashMap, HashSet};
   3use futures::channel::mpsc;
   4use futures::lock::Mutex;
   5use futures::{FutureExt as _, StreamExt, future};
   6use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
   7use itertools::Itertools;
   8use language::{Buffer, BufferEvent};
   9use postage::stream::Stream as _;
  10use project::buffer_store::{BufferStore, BufferStoreEvent};
  11use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
  12use project::{PathChange, Project, ProjectEntryId, ProjectPath};
  13use slotmap::SlotMap;
  14use std::iter;
  15use std::ops::{DerefMut, Range};
  16use std::sync::Arc;
  17use text::BufferId;
  18use util::{RangeExt as _, debug_panic, some_or_debug_panic};
  19
  20use crate::declaration::{
  21    BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
  22};
  23use crate::outline::declarations_in_buffer;
  24
  25// TODO
  26//
  27// * Also queue / debounce buffer changes. A challenge for this is that use of
  28// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
  29//
  30// * Add a per language configuration for skipping indexing.
  31
  32// Potential future improvements:
  33//
  34// * Prevent indexing of a large file from blocking the queue.
  35//
  36// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which
  37// references are present and their scores.
  38//
  39// * Include single-file worktrees / non visible worktrees? E.g. go to definition that resolves to a
  40// file in a build dependency. Should not be editable in that case - but how to distinguish the case
  41// where it should be editable?
  42
  43// Potential future optimizations:
  44//
  45// * Index files on multiple threads in Zed (currently only parallel for the CLI). Adding some kind
  46// of priority system to the background executor could help - it's single threaded for now to avoid
  47// interfering with other work.
  48//
  49// * Parse files directly instead of loading into a Rope.
  50//
  51//   - This would allow the task handling dirty_files to be done entirely on the background executor.
  52//
  53//   - Make SyntaxMap generic to handle embedded languages? Will also need to find line boundaries,
  54//   but that can be done by scanning characters in the flat representation.
  55//
  56// * Use something similar to slotmap without key versions.
  57//
  58// * Concurrent slotmap
  59
/// Indexes declarations found in project files and open buffers so they can
/// be looked up by identifier or buffer range.
pub struct SyntaxIndex {
    // Shared mutable index state; also captured (weakly) by background tasks.
    state: Arc<Mutex<SyntaxIndexState>>,
    // Weak handle to the project, used to resolve worktrees/paths on demand.
    project: WeakEntity<Project>,
    // Becomes `true` once the initial walk over all worktree files finishes.
    initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
}
  65
/// The index data itself. Declarations are owned by the slotmap and referenced
/// by id from the per-identifier, per-file, and per-buffer maps.
pub struct SyntaxIndexState {
    // Owns every `Declaration`; ids stay valid until the entry is removed.
    declarations: SlotMap<DeclarationId, Declaration>,
    // Identifier -> ids of all declarations with that identifier.
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    // Declarations parsed from files on disk, keyed by project entry.
    files: HashMap<ProjectEntryId, FileState>,
    // Declarations parsed from open buffers, keyed by buffer id.
    buffers: HashMap<BufferId, BufferState>,
    // Files awaiting (re)indexing; drained in batches by the indexing task.
    dirty_files: HashMap<ProjectEntryId, ProjectPath>,
    // Wakes the indexing task when `dirty_files` gains entries.
    dirty_files_tx: mpsc::Sender<()>,
    // Long-running task: initial indexing, then dirty-file batch processing.
    _file_indexing_task: Option<Task<()>>,
}
  75
/// Per-file index entry: ids of the declarations parsed from the file on disk.
#[derive(Debug, Default)]
struct FileState {
    declarations: Vec<DeclarationId>,
}
  80
/// Per-buffer index entry.
#[derive(Default)]
struct BufferState {
    // Ids of the declarations parsed from the buffer's current contents.
    declarations: Vec<DeclarationId>,
    // In-flight re-index task for this buffer; replaced on each edit.
    task: Option<Task<()>>,
}
  86
  87impl SyntaxIndex {
  88    pub fn new(
  89        project: &Entity<Project>,
  90        file_indexing_parallelism: usize,
  91        cx: &mut Context<Self>,
  92    ) -> Self {
  93        assert!(file_indexing_parallelism > 0);
  94        let (dirty_files_tx, mut dirty_files_rx) = mpsc::channel::<()>(1);
  95        let (mut initial_file_indexing_done_tx, initial_file_indexing_done_rx) =
  96            postage::watch::channel();
  97
  98        let initial_state = SyntaxIndexState {
  99            declarations: SlotMap::default(),
 100            identifiers: HashMap::default(),
 101            files: HashMap::default(),
 102            buffers: HashMap::default(),
 103            dirty_files: HashMap::default(),
 104            dirty_files_tx,
 105            _file_indexing_task: None,
 106        };
 107        let this = Self {
 108            project: project.downgrade(),
 109            state: Arc::new(Mutex::new(initial_state)),
 110            initial_file_indexing_done_rx,
 111        };
 112
 113        let worktree_store = project.read(cx).worktree_store();
 114        let initial_worktree_snapshots = worktree_store
 115            .read(cx)
 116            .worktrees()
 117            .map(|w| w.read(cx).snapshot())
 118            .collect::<Vec<_>>();
 119        this.state.try_lock().unwrap()._file_indexing_task =
 120            Some(cx.spawn(async move |this, cx| {
 121                let snapshots_file_count = initial_worktree_snapshots
 122                    .iter()
 123                    .map(|worktree| worktree.file_count())
 124                    .sum::<usize>();
 125                let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
 126                let chunk_count = snapshots_file_count.div_ceil(chunk_size);
 127                let file_chunks = initial_worktree_snapshots
 128                    .iter()
 129                    .flat_map(|worktree| {
 130                        let worktree_id = worktree.id();
 131                        worktree.files(false, 0).map(move |entry| {
 132                            (
 133                                entry.id,
 134                                ProjectPath {
 135                                    worktree_id,
 136                                    path: entry.path.clone(),
 137                                },
 138                            )
 139                        })
 140                    })
 141                    .chunks(chunk_size);
 142
 143                let mut tasks = Vec::with_capacity(chunk_count);
 144                for chunk in file_chunks.into_iter() {
 145                    tasks.push(Self::update_dirty_files(
 146                        &this,
 147                        chunk.into_iter().collect(),
 148                        cx.clone(),
 149                    ));
 150                }
 151                futures::future::join_all(tasks).await;
 152
 153                log::info!("Finished initial file indexing");
 154                *initial_file_indexing_done_tx.borrow_mut() = true;
 155
 156                let Ok(state) = this.read_with(cx, |this, _cx| this.state.clone()) else {
 157                    return;
 158                };
 159                while dirty_files_rx.next().await.is_some() {
 160                    let mut state = state.lock().await;
 161                    let was_underused = state.dirty_files.capacity() > 255
 162                        && state.dirty_files.len() * 8 < state.dirty_files.capacity();
 163                    let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
 164                    if was_underused {
 165                        state.dirty_files.shrink_to_fit();
 166                    }
 167                    drop(state);
 168                    if dirty_files.is_empty() {
 169                        continue;
 170                    }
 171
 172                    let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
 173                    let chunk_count = dirty_files.len().div_ceil(chunk_size);
 174                    let mut tasks = Vec::with_capacity(chunk_count);
 175                    let chunks = dirty_files.into_iter().chunks(chunk_size);
 176                    for chunk in chunks.into_iter() {
 177                        tasks.push(Self::update_dirty_files(
 178                            &this,
 179                            chunk.into_iter().collect(),
 180                            cx.clone(),
 181                        ));
 182                    }
 183                    futures::future::join_all(tasks).await;
 184                }
 185            }));
 186
 187        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
 188            .detach();
 189
 190        let buffer_store = project.read(cx).buffer_store().clone();
 191        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
 192            this.register_buffer(&buffer, cx);
 193        }
 194        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
 195            .detach();
 196
 197        this
 198    }
 199
 200    async fn update_dirty_files(
 201        this: &WeakEntity<Self>,
 202        dirty_files: Vec<(ProjectEntryId, ProjectPath)>,
 203        mut cx: AsyncApp,
 204    ) {
 205        for (entry_id, project_path) in dirty_files {
 206            let Ok(task) = this.update(&mut cx, |this, cx| {
 207                this.update_file(entry_id, project_path, cx)
 208            }) else {
 209                return;
 210            };
 211            task.await;
 212        }
 213    }
 214
 215    pub fn wait_for_initial_file_indexing(&self, cx: &App) -> Task<Result<()>> {
 216        if *self.initial_file_indexing_done_rx.borrow() {
 217            Task::ready(Ok(()))
 218        } else {
 219            let mut rx = self.initial_file_indexing_done_rx.clone();
 220            cx.background_spawn(async move {
 221                loop {
 222                    match rx.recv().await {
 223                        Some(true) => return Ok(()),
 224                        Some(false) => {}
 225                        None => {
 226                            return Err(anyhow!(
 227                                "SyntaxIndex dropped while waiting for initial file indexing"
 228                            ));
 229                        }
 230                    }
 231                }
 232            })
 233        }
 234    }
 235
 236    pub fn indexed_file_paths(&self, cx: &App) -> Task<Vec<ProjectPath>> {
 237        let state = self.state.clone();
 238        let project = self.project.clone();
 239
 240        cx.spawn(async move |cx| {
 241            let state = state.lock().await;
 242            let Some(project) = project.upgrade() else {
 243                return vec![];
 244            };
 245            project
 246                .read_with(cx, |project, cx| {
 247                    state
 248                        .files
 249                        .keys()
 250                        .filter_map(|entry_id| project.path_for_entry(*entry_id, cx))
 251                        .collect()
 252                })
 253                .unwrap_or_default()
 254        })
 255    }
 256
    /// Keeps the index in sync with worktree changes: removed entries are
    /// dropped from the index, and changed/added entries are marked dirty so
    /// the indexing task re-processes them.
    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                // Hold only a weak reference so this task doesn't keep the
                // index state alive after the index is dropped.
                let state = Arc::downgrade(&self.state);
                let worktree_id = *worktree_id;
                let updated_entries_set = updated_entries_set.clone();
                cx.background_spawn(async move {
                    let Some(state) = state.upgrade() else { return };
                    let mut state = state.lock().await;
                    for (path, entry_id, path_change) in updated_entries_set.iter() {
                        if let PathChange::Removed = path_change {
                            // Drop both indexed data and any pending dirty mark.
                            state.files.remove(entry_id);
                            state.dirty_files.remove(entry_id);
                        } else {
                            let project_path = ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            };
                            state.dirty_files.insert(*entry_id, project_path);
                        }
                    }
                    // Wake the indexing task. A full channel is fine (it only
                    // needs one pending wakeup); disconnection is a bug.
                    match state.dirty_files_tx.try_send(()) {
                        Err(err) if err.is_disconnected() => {
                            log::error!("bug: syntax indexing queue is disconnected");
                        }
                        _ => {}
                    }
                })
                .detach();
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                let project_entry_id = *project_entry_id;
                self.with_state(cx, move |state| {
                    state.files.remove(&project_entry_id);
                })
            }
            _ => {}
        }
    }
 302
    /// Registers newly added buffers with the index; all other buffer-store
    /// events are intentionally ignored.
    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            // Listed exhaustively (no `_` arm) so that adding a new event
            // variant forces a review of this handler.
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }
 318
    /// Returns the shared index state for direct querying by callers.
    pub fn state(&self) -> &Arc<Mutex<SyntaxIndexState>> {
        &self.state
    }
 322
 323    fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) {
 324        if let Some(mut state) = self.state.try_lock() {
 325            f(&mut state);
 326            return;
 327        }
 328        let state = Arc::downgrade(&self.state);
 329        cx.background_spawn(async move {
 330            let Some(state) = state.upgrade() else {
 331                return;
 332            };
 333            let mut state = state.lock().await;
 334            f(&mut state)
 335        })
 336        .detach();
 337    }
 338
    /// Starts tracking `buffer`: indexes its current contents, re-indexes on
    /// edits (via `handle_buffer_event`), and removes its declarations from
    /// the index when the buffer entity is released.
    fn register_buffer(&self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer_id = buffer.read(cx).remote_id();
        cx.observe_release(buffer, move |this, _buffer, cx| {
            this.with_state(cx, move |state| {
                if let Some(buffer_state) = state.buffers.remove(&buffer_id) {
                    SyntaxIndexState::remove_buffer_declarations(
                        &buffer_state.declarations,
                        &mut state.declarations,
                        &mut state.identifiers,
                    );
                }
            })
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();

        // Index the buffer's current contents immediately.
        self.update_buffer(buffer.clone(), cx);
    }
 357
 358    fn handle_buffer_event(
 359        &mut self,
 360        buffer: Entity<Buffer>,
 361        event: &BufferEvent,
 362        cx: &mut Context<Self>,
 363    ) {
 364        match event {
 365            BufferEvent::Edited => self.update_buffer(buffer, cx),
 366            _ => {}
 367        }
 368    }
 369
    /// Re-indexes the declarations of an open buffer.
    ///
    /// Waits for parsing to go idle, extracts outline declarations on the
    /// background executor, then atomically replaces this buffer's entries in
    /// the index. Buffers without a language or a backing project entry are
    /// skipped.
    fn update_buffer(&self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer = buffer_entity.read(cx);
        if buffer.language().is_none() {
            return;
        }

        // Only buffers backed by a project file can be indexed.
        let Some(project_entry_id) =
            project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
        else {
            return;
        };
        let buffer_id = buffer.remote_id();

        // Snapshot the buffer only once its syntax tree is fully parsed.
        let mut parse_status = buffer.parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer_entity.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let state = Arc::downgrade(&self.state);
        let task = cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok(snapshot) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.text.as_rope();

            // Each item carries the index of its parent within this list, so
            // parents can be re-linked after insertion below.
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| {
                    (
                        item.parent_index,
                        BufferDeclaration::from_outline(item, &rope),
                    )
                })
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let buffer_state = state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default);

            // Remove this buffer's previous declarations before inserting the
            // freshly parsed set.
            SyntaxIndexState::remove_buffer_declarations(
                &buffer_state.declarations,
                &mut state.declarations,
                &mut state.identifiers,
            );

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                // Parents precede children in outline order, so the parent's
                // new id is already in `new_ids` when the child is processed.
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::Buffer {
                    rope: rope.clone(),
                    buffer_id,
                    declaration,
                    project_entry_id,
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }

            buffer_state.declarations = new_ids;
        });

        // Store the task so it is cancelled/replaced if another edit arrives.
        self.with_state(cx, move |state| {
            state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default)
                .task = Some(task)
        });
    }
 462
    /// Re-indexes the declarations of a file on disk.
    ///
    /// Loads the file into a temporary local buffer, waits for parsing, then
    /// replaces this entry's declarations in the index. Returns a ready task
    /// when the file can't be indexed (project dropped, no matching language,
    /// no outline support, or missing worktree).
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        let Some(project) = self.project.upgrade() else {
            return Task::ready(());
        };
        let project = project.read(cx);

        let language_registry = project.languages();
        let Some(available_language) =
            language_registry.language_for_file_path(project_path.path.as_std_path())
        else {
            return Task::ready(());
        };
        // Fast path: if the language is already loaded, check for outline
        // support up front and skip loading the file entirely when absent.
        let language = if let Some(Ok(Ok(language))) = language_registry
            .load_language(&available_language)
            .now_or_never()
        {
            if language
                .grammar()
                .is_none_or(|grammar| grammar.outline_config.is_none())
            {
                return Task::ready(());
            }
            future::Either::Left(async { Ok(language) })
        } else {
            // Slow path: load the language asynchronously alongside the file.
            let language_registry = language_registry.clone();
            future::Either::Right(async move {
                anyhow::Ok(
                    language_registry
                        .load_language(&available_language)
                        .await??,
                )
            })
        };

        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(());
        };

        // Load the file text, build a throwaway local buffer with the
        // language set, and snapshot it once parsing goes idle.
        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language.await?;

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(Some(language), cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            })
        });

        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok(snapshot) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.as_rope();
            // Each item carries the index of its parent within this list, so
            // parents can be re-linked after insertion below.
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            // Remove this file's previous declarations from both the slotmap
            // and the identifier sets before inserting the fresh set.
            let file_state = state.files.entry(entry_id).or_insert_with(Default::default);
            for old_declaration_id in &file_state.declarations {
                let Some(declaration) = state.declarations.remove(*old_declaration_id) else {
                    debug_panic!("declaration not found");
                    continue;
                };
                if let Some(identifier_declarations) =
                    state.identifiers.get_mut(declaration.identifier())
                {
                    identifier_declarations.remove(old_declaration_id);
                }
            }

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                // Parents precede children in outline order, so the parent's
                // new id is already in `new_ids` when the child is processed.
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::File {
                    project_entry_id: entry_id,
                    declaration,
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }
            file_state.declarations = new_ids;
        })
    }
 580}
 581
 582impl SyntaxIndexState {
    /// Looks up a declaration by id; `None` if it has been removed.
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }
 586
    /// Returns declarations for the identifier. If the limit is exceeded, returns an empty vector.
    ///
    /// Buffer declarations shadow file declarations: a file declaration is
    /// only included when no buffer declaration for the same project entry
    /// was encountered.
    ///
    /// TODO: Consider doing some pre-ranking and instead truncating when N is exceeded.
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
    ) -> Vec<(DeclarationId, &Declaration)> {
        // make sure to not have a large stack allocation
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(&identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        // File declarations are deferred to a second pass so that a buffer
        // declaration seen later in the iteration can still shadow them.
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer {
                    project_entry_id, ..
                } => {
                    included_buffer_entry_ids.push(*project_entry_id);
                    result.push((*declaration_id, declaration));
                    // Limit exceeded: the documented contract is to return
                    // nothing rather than an arbitrary subset.
                    if result.len() == N {
                        return Vec::new();
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        file_declarations.push((*declaration_id, declaration));
                    }
                }
            }
        }

        // Second pass: re-check shadowing, since buffer declarations may have
        // been added to `included_buffer_entry_ids` after a file declaration
        // was deferred above.
        for (declaration_id, declaration) in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        result.push((declaration_id, declaration));

                        if result.len() == N {
                            return Vec::new();
                        }
                    }
                }
                // Unreachable: only `File` declarations are deferred.
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }
 649
 650    pub fn buffer_declarations_containing_range(
 651        &self,
 652        buffer_id: BufferId,
 653        range: Range<usize>,
 654    ) -> impl Iterator<Item = (DeclarationId, &BufferDeclaration)> {
 655        let Some(buffer_state) = self.buffers.get(&buffer_id) else {
 656            return itertools::Either::Left(iter::empty());
 657        };
 658
 659        let iter = buffer_state
 660            .declarations
 661            .iter()
 662            .filter_map(move |declaration_id| {
 663                let Some(declaration) = self
 664                    .declarations
 665                    .get(*declaration_id)
 666                    .and_then(|d| d.as_buffer())
 667                else {
 668                    log::error!("bug: missing buffer outline declaration");
 669                    return None;
 670                };
 671                if declaration.item_range.contains_inclusive(&range) {
 672                    return Some((*declaration_id, declaration));
 673                }
 674                return None;
 675            });
 676        itertools::Either::Right(iter)
 677    }
 678
 679    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
 680        match declaration {
 681            Declaration::File {
 682                project_entry_id, ..
 683            } => self
 684                .files
 685                .get(project_entry_id)
 686                .map(|file_state| file_state.declarations.len())
 687                .unwrap_or_default(),
 688            Declaration::Buffer { buffer_id, .. } => self
 689                .buffers
 690                .get(buffer_id)
 691                .map(|buffer_state| buffer_state.declarations.len())
 692                .unwrap_or_default(),
 693        }
 694    }
 695
 696    fn remove_buffer_declarations(
 697        old_declaration_ids: &[DeclarationId],
 698        declarations: &mut SlotMap<DeclarationId, Declaration>,
 699        identifiers: &mut HashMap<Identifier, HashSet<DeclarationId>>,
 700    ) {
 701        for old_declaration_id in old_declaration_ids {
 702            let Some(declaration) = declarations.remove(*old_declaration_id) else {
 703                debug_panic!("declaration not found");
 704                continue;
 705            };
 706            if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) {
 707                identifier_declarations.remove(old_declaration_id);
 708            }
 709        }
 710    }
 711}
 712
 713#[cfg(test)]
 714mod tests {
 715    use super::*;
 716    use std::sync::Arc;
 717
 718    use gpui::TestAppContext;
 719    use indoc::indoc;
 720    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
 721    use project::{FakeFs, Project};
 722    use serde_json::json;
 723    use settings::SettingsStore;
 724    use text::OffsetRangeExt as _;
 725    use util::{path, rel_path::rel_path};
 726
 727    use crate::syntax_index::SyntaxIndex;
 728
    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        // Files should be indexed from disk without ever being opened.
        // Expected ranges depend on the `init_test` fixture contents
        // (defined elsewhere in this file).
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);

            // Both `main` declarations come from unopened files.
            let decl = expect_file_decl("a.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range, 0..98);

            let decl = expect_file_decl("c.rs", &decls[1].1, &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range, 32..280);
        });
    }
 752
    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        // Declarations indexed from disk should be linked to their parent
        // declarations (here: a test fn inside a `tests` module).
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            // The parent chain resolves to the enclosing `tests` module,
            // which itself has no parent.
            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }
 783
    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        // Same as `test_parents_in_file`, but after opening the buffer the
        // declarations should come from the buffer index (buffer declarations
        // shadow file declarations for the same entry).
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        // Let the index observe the opened buffer and finish re-indexing.
        cx.run_until_parked();

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            // Parent chain: test fn -> `tests` module -> no parent.
            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        // Keep the buffer alive until the assertions above have run.
        drop(buffer);
    }
 826
 827    #[gpui::test]
 828    async fn test_declarations_limt(cx: &mut TestAppContext) {
 829        let (_, index, rust_lang_id) = init_test(cx).await;
 830
 831        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
 832        let index_state = index_state.lock().await;
 833        let decls = index_state.declarations_for_identifier::<1>(&Identifier {
 834            name: "main".into(),
 835            language_id: rust_lang_id,
 836        });
 837        assert_eq!(decls.len(), 0);
 838    }
 839
 840    #[gpui::test]
 841    async fn test_buffer_shadow(cx: &mut TestAppContext) {
 842        let (project, index, rust_lang_id) = init_test(cx).await;
 843
 844        let main = Identifier {
 845            name: "main".into(),
 846            language_id: rust_lang_id,
 847        };
 848
 849        let buffer = project
 850            .update(cx, |project, cx| {
 851                let project_path = project.find_project_path("c.rs", cx).unwrap();
 852                project.open_buffer(project_path, cx)
 853            })
 854            .await
 855            .unwrap();
 856
 857        cx.run_until_parked();
 858
 859        let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone());
 860        {
 861            let index_state = index_state_arc.lock().await;
 862
 863            cx.update(|cx| {
 864                let decls = index_state.declarations_for_identifier::<8>(&main);
 865                assert_eq!(decls.len(), 2);
 866                let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
 867                assert_eq!(decl.identifier, main);
 868                assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..280);
 869
 870                expect_file_decl("a.rs", &decls[1].1, &project, cx);
 871            });
 872        }
 873
 874        // Drop the buffer and wait for release
 875        cx.update(|_| {
 876            drop(buffer);
 877        });
 878        cx.run_until_parked();
 879
 880        let index_state = index_state_arc.lock().await;
 881
 882        cx.update(|cx| {
 883            let decls = index_state.declarations_for_identifier::<8>(&main);
 884            assert_eq!(decls.len(), 2);
 885            expect_file_decl("a.rs", &decls[0].1, &project, cx);
 886            expect_file_decl("c.rs", &decls[1].1, &project, cx);
 887        });
 888    }
 889
 890    fn expect_buffer_decl<'a>(
 891        path: &str,
 892        declaration: &'a Declaration,
 893        project: &Entity<Project>,
 894        cx: &App,
 895    ) -> &'a BufferDeclaration {
 896        if let Declaration::Buffer {
 897            declaration,
 898            project_entry_id,
 899            ..
 900        } = declaration
 901        {
 902            let project_path = project
 903                .read(cx)
 904                .path_for_entry(*project_entry_id, cx)
 905                .unwrap();
 906            assert_eq!(project_path.path.as_ref(), rel_path(path),);
 907            declaration
 908        } else {
 909            panic!("Expected a buffer declaration, found {:?}", declaration);
 910        }
 911    }
 912
 913    fn expect_file_decl<'a>(
 914        path: &str,
 915        declaration: &'a Declaration,
 916        project: &Entity<Project>,
 917        cx: &App,
 918    ) -> &'a FileDeclaration {
 919        if let Declaration::File {
 920            declaration,
 921            project_entry_id: file,
 922        } = declaration
 923        {
 924            assert_eq!(
 925                project
 926                    .read(cx)
 927                    .path_for_entry(*file, cx)
 928                    .unwrap()
 929                    .path
 930                    .as_ref(),
 931                rel_path(path),
 932            );
 933            declaration
 934        } else {
 935            panic!("Expected a file declaration, found {:?}", declaration);
 936        }
 937    }
 938
    /// Builds the shared test fixture: a fake project at `/root` containing
    /// three Rust files (a.rs, b.rs, c.rs), with the Rust language registered
    /// and a `SyntaxIndex` constructed over the project. Returns the project,
    /// the index entity, and the id of the registered Rust language.
    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<SyntaxIndex>, LanguageId) {
        // Globals must be installed before creating the project below.
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        // Fixture contents: a.rs and c.rs both define `main` (used by the
        // shadowing/limit tests); c.rs additionally has a nested `tests`
        // module with `test_process_data` (used by the parent-chain tests).
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        // Register Rust (with its outline query) so the .rs fixtures are
        // recognized; capture the language id before handing over ownership.
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        // Allow up to two files to be indexed concurrently.
        let file_indexing_parallelism = 2;
        let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx));
        // Let the initial scan/indexing of the worktree complete before
        // returning to the test body.
        cx.run_until_parked();

        (project, index, lang_id)
    }
1025
1026    fn rust_lang() -> Language {
1027        Language::new(
1028            LanguageConfig {
1029                name: "Rust".into(),
1030                matcher: LanguageMatcher {
1031                    path_suffixes: vec!["rs".to_string()],
1032                    ..Default::default()
1033                },
1034                ..Default::default()
1035            },
1036            Some(tree_sitter_rust::LANGUAGE.into()),
1037        )
1038        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
1039        .unwrap()
1040    }
1041}