syntax_index.rs

   1use anyhow::{Result, anyhow};
   2use collections::{HashMap, HashSet};
   3use futures::channel::mpsc;
   4use futures::lock::Mutex;
   5use futures::{FutureExt as _, StreamExt, future};
   6use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
   7use itertools::Itertools;
   8use language::{Buffer, BufferEvent};
   9use postage::stream::Stream as _;
  10use project::buffer_store::{BufferStore, BufferStoreEvent};
  11use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
  12use project::{PathChange, Project, ProjectEntryId, ProjectPath};
  13use slotmap::SlotMap;
  14use std::iter;
  15use std::ops::{DerefMut, Range};
  16use std::sync::Arc;
  17use text::BufferId;
  18use util::{RangeExt as _, debug_panic, some_or_debug_panic};
  19
  20use crate::declaration::{
  21    BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
  22};
  23use crate::outline::declarations_in_buffer;
  24
  25// TODO
  26//
  27// * Also queue / debounce buffer changes. A challenge for this is that use of
  28// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
  29//
  30// * Add a per language configuration for skipping indexing.
  31
  32// Potential future improvements:
  33//
  34// * Prevent indexing of a large file from blocking the queue.
  35//
  36// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which
  37// references are present and their scores.
  38//
// * Include single-file worktrees / non-visible worktrees? E.g. a go-to-definition that resolves to
// a file in a build dependency. It should not be editable in that case - but how do we distinguish
// the case where it should be editable?
  42
  43// Potential future optimizations:
  44//
  45// * Index files on multiple threads in Zed (currently only parallel for the CLI). Adding some kind
  46// of priority system to the background executor could help - it's single threaded for now to avoid
  47// interfering with other work.
  48//
  49// * Parse files directly instead of loading into a Rope.
  50//
  51//   - This would allow the task handling dirty_files to be done entirely on the background executor.
  52//
  53//   - Make SyntaxMap generic to handle embedded languages? Will also need to find line boundaries,
  54//   but that can be done by scanning characters in the flat representation.
  55//
  56// * Use something similar to slotmap without key versions.
  57//
  58// * Concurrent slotmap
  59
/// Index of declarations extracted from the syntax trees of a project's files
/// and open buffers, used to resolve identifiers to their declarations.
pub struct SyntaxIndex {
    // Shared with background tasks; async mutex so it can be held across awaits.
    state: Arc<Mutex<SyntaxIndexState>>,
    // Weak so the index does not keep the project alive.
    project: WeakEntity<Project>,
    // Becomes `true` once the initial walk over all worktree files completes.
    initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
}
  65
/// Mutable state of the index: all known declarations plus the bookkeeping
/// needed to keep them up to date incrementally.
pub struct SyntaxIndexState {
    // Arena of every declaration, from both files and open buffers.
    declarations: SlotMap<DeclarationId, Declaration>,
    // Identifier -> ids of all declarations sharing that identifier.
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    // Per-file declaration lists, keyed by project entry.
    files: HashMap<ProjectEntryId, FileState>,
    // Per-open-buffer declaration lists and in-flight update tasks.
    buffers: HashMap<BufferId, BufferState>,
    // Files whose on-disk contents changed and still need (re)indexing.
    dirty_files: HashMap<ProjectEntryId, ProjectPath>,
    // Wakes the indexing task when `dirty_files` becomes non-empty.
    dirty_files_tx: mpsc::Sender<()>,
    // Long-running task: initial scan, then drains `dirty_files` on signal.
    _file_indexing_task: Option<Task<()>>,
}
  75
/// Declarations currently indexed for a single on-disk file.
#[derive(Debug, Default)]
struct FileState {
    declarations: Vec<DeclarationId>,
}
  80
/// Declarations currently indexed for a single open buffer.
#[derive(Default)]
struct BufferState {
    declarations: Vec<DeclarationId>,
    // Most recent update task; replaced (and thereby cancelled) on re-index.
    task: Option<Task<()>>,
}
  86
  87impl SyntaxIndex {
    /// Creates the index for `project` and kicks off an initial scan of every
    /// file in the project's worktrees. `file_indexing_parallelism` bounds how
    /// many chunks of files are indexed concurrently; must be > 0.
    pub fn new(
        project: &Entity<Project>,
        file_indexing_parallelism: usize,
        cx: &mut Context<Self>,
    ) -> Self {
        assert!(file_indexing_parallelism > 0);
        // Capacity-1 channel: it is only a "dirty set is non-empty" signal.
        let (dirty_files_tx, mut dirty_files_rx) = mpsc::channel::<()>(1);
        let (mut initial_file_indexing_done_tx, initial_file_indexing_done_rx) =
            postage::watch::channel();

        let initial_state = SyntaxIndexState {
            declarations: SlotMap::default(),
            identifiers: HashMap::default(),
            files: HashMap::default(),
            buffers: HashMap::default(),
            dirty_files: HashMap::default(),
            dirty_files_tx,
            _file_indexing_task: None,
        };
        let this = Self {
            project: project.downgrade(),
            state: Arc::new(Mutex::new(initial_state)),
            initial_file_indexing_done_rx,
        };

        let worktree_store = project.read(cx).worktree_store();
        let initial_worktree_snapshots = worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>();
        if !initial_worktree_snapshots.is_empty() {
            // `try_lock().unwrap()` is safe here: the state was just created
            // and nothing else can hold the lock yet.
            this.state.try_lock().unwrap()._file_indexing_task =
                Some(cx.spawn(async move |this, cx| {
                    // Split all files into roughly equal chunks, one task per
                    // chunk, up to `file_indexing_parallelism` chunks.
                    let snapshots_file_count = initial_worktree_snapshots
                        .iter()
                        .map(|worktree| worktree.file_count())
                        .sum::<usize>();
                    let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
                    let chunk_count = snapshots_file_count.div_ceil(chunk_size);
                    let file_chunks = initial_worktree_snapshots
                        .iter()
                        .flat_map(|worktree| {
                            let worktree_id = worktree.id();
                            worktree.files(false, 0).map(move |entry| {
                                (
                                    entry.id,
                                    ProjectPath {
                                        worktree_id,
                                        path: entry.path.clone(),
                                    },
                                )
                            })
                        })
                        .chunks(chunk_size);

                    let mut tasks = Vec::with_capacity(chunk_count);
                    for chunk in file_chunks.into_iter() {
                        tasks.push(Self::update_dirty_files(
                            &this,
                            chunk.into_iter().collect(),
                            cx.clone(),
                        ));
                    }
                    futures::future::join_all(tasks).await;

                    log::info!("Finished initial file indexing");
                    *initial_file_indexing_done_tx.borrow_mut() = true;

                    let Ok(state) = this.read_with(cx, |this, _cx| this.state.clone()) else {
                        return;
                    };
                    // After the initial scan, drain the dirty set each time the
                    // worktree observer signals a change.
                    while dirty_files_rx.next().await.is_some() {
                        let mut state = state.lock().await;
                        // Shrink the map when its capacity greatly exceeds its
                        // contents, so a burst of changes doesn't pin memory.
                        let was_underused = state.dirty_files.capacity() > 255
                            && state.dirty_files.len() * 8 < state.dirty_files.capacity();
                        let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
                        if was_underused {
                            state.dirty_files.shrink_to_fit();
                        }
                        // Release the lock before doing any indexing work.
                        drop(state);
                        if dirty_files.is_empty() {
                            continue;
                        }

                        // Same chunking strategy as the initial scan.
                        let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
                        let chunk_count = dirty_files.len().div_ceil(chunk_size);
                        let mut tasks = Vec::with_capacity(chunk_count);
                        let chunks = dirty_files.into_iter().chunks(chunk_size);
                        for chunk in chunks.into_iter() {
                            tasks.push(Self::update_dirty_files(
                                &this,
                                chunk.into_iter().collect(),
                                cx.clone(),
                            ));
                        }
                        futures::future::join_all(tasks).await;
                    }
                }));
        }

        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        // Index already-open buffers now; later opens arrive via the event.
        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }
 201
 202    async fn update_dirty_files(
 203        this: &WeakEntity<Self>,
 204        dirty_files: Vec<(ProjectEntryId, ProjectPath)>,
 205        mut cx: AsyncApp,
 206    ) {
 207        for (entry_id, project_path) in dirty_files {
 208            let Ok(task) = this.update(&mut cx, |this, cx| {
 209                this.update_file(entry_id, project_path, cx)
 210            }) else {
 211                return;
 212            };
 213            task.await;
 214        }
 215    }
 216
 217    pub fn wait_for_initial_file_indexing(&self, cx: &App) -> Task<Result<()>> {
 218        if *self.initial_file_indexing_done_rx.borrow() {
 219            Task::ready(Ok(()))
 220        } else {
 221            let mut rx = self.initial_file_indexing_done_rx.clone();
 222            cx.background_spawn(async move {
 223                loop {
 224                    match rx.recv().await {
 225                        Some(true) => return Ok(()),
 226                        Some(false) => {}
 227                        None => {
 228                            return Err(anyhow!(
 229                                "SyntaxIndex dropped while waiting for initial file indexing"
 230                            ));
 231                        }
 232                    }
 233                }
 234            })
 235        }
 236    }
 237
    /// Resolves the project paths of all files currently present in the index.
    /// Returns an empty list if the project has been dropped.
    pub fn indexed_file_paths(&self, cx: &App) -> Task<Vec<ProjectPath>> {
        let state = self.state.clone();
        let project = self.project.clone();

        cx.spawn(async move |cx| {
            let state = state.lock().await;
            let Some(project) = project.upgrade() else {
                return vec![];
            };
            // Entries that no longer resolve to a path are silently skipped.
            project
                .read_with(cx, |project, cx| {
                    state
                        .files
                        .keys()
                        .filter_map(|entry_id| project.path_for_entry(*entry_id, cx))
                        .collect()
                })
                .unwrap_or_default()
        })
    }
 258
    /// Keeps the dirty-file set in sync with worktree changes and signals the
    /// indexing task when there is new work.
    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                let state = Arc::downgrade(&self.state);
                let worktree_id = *worktree_id;
                let updated_entries_set = updated_entries_set.clone();
                cx.background_spawn(async move {
                    let Some(state) = state.upgrade() else { return };
                    let mut state = state.lock().await;
                    for (path, entry_id, path_change) in updated_entries_set.iter() {
                        if let PathChange::Removed = path_change {
                            // Forget removed files entirely.
                            state.files.remove(entry_id);
                            state.dirty_files.remove(entry_id);
                        } else {
                            // Added/changed: mark dirty for re-indexing.
                            let project_path = ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            };
                            state.dirty_files.insert(*entry_id, project_path);
                        }
                    }
                    // A full channel just means a wakeup is already pending;
                    // only disconnection is a bug worth reporting.
                    match state.dirty_files_tx.try_send(()) {
                        Err(err) if err.is_disconnected() => {
                            log::error!("bug: syntax indexing queue is disconnected");
                        }
                        _ => {}
                    }
                })
                .detach();
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                let project_entry_id = *project_entry_id;
                self.with_state(cx, move |state| {
                    state.files.remove(&project_entry_id);
                })
            }
            _ => {}
        }
    }
 304
 305    fn handle_buffer_store_event(
 306        &mut self,
 307        _buffer_store: Entity<BufferStore>,
 308        event: &BufferStoreEvent,
 309        cx: &mut Context<Self>,
 310    ) {
 311        use BufferStoreEvent::*;
 312        match event {
 313            BufferAdded(buffer) => self.register_buffer(buffer, cx),
 314            BufferOpened { .. }
 315            | BufferChangedFilePath { .. }
 316            | BufferDropped { .. }
 317            | SharedBufferClosed { .. } => {}
 318        }
 319    }
 320
    /// Shared handle to the index state; callers must lock it to read.
    pub fn state(&self) -> &Arc<Mutex<SyntaxIndexState>> {
        &self.state
    }
 324
 325    fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) {
 326        if let Some(mut state) = self.state.try_lock() {
 327            f(&mut state);
 328            return;
 329        }
 330        let state = Arc::downgrade(&self.state);
 331        cx.background_spawn(async move {
 332            let Some(state) = state.upgrade() else {
 333                return;
 334            };
 335            let mut state = state.lock().await;
 336            f(&mut state)
 337        })
 338        .detach();
 339    }
 340
    /// Starts tracking `buffer`: indexes it now, re-indexes it on edits, and
    /// removes its declarations when the buffer entity is released.
    fn register_buffer(&self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer_id = buffer.read(cx).remote_id();
        // Clean up this buffer's declarations when the entity is dropped.
        cx.observe_release(buffer, move |this, _buffer, cx| {
            this.with_state(cx, move |state| {
                if let Some(buffer_state) = state.buffers.remove(&buffer_id) {
                    SyntaxIndexState::remove_buffer_declarations(
                        &buffer_state.declarations,
                        &mut state.declarations,
                        &mut state.identifiers,
                    );
                }
            })
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();

        // Index the buffer's current contents immediately.
        self.update_buffer(buffer.clone(), cx);
    }
 359
 360    fn handle_buffer_event(
 361        &mut self,
 362        buffer: Entity<Buffer>,
 363        event: &BufferEvent,
 364        cx: &mut Context<Self>,
 365    ) {
 366        match event {
 367            BufferEvent::Edited => self.update_buffer(buffer, cx),
 368            _ => {}
 369        }
 370    }
 371
    /// Re-indexes the declarations of an open buffer: waits for parsing to go
    /// idle, extracts outline declarations on the background executor, then
    /// atomically replaces the buffer's previous declarations in the index.
    fn update_buffer(&self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer = buffer_entity.read(cx);
        // No language means no outline queries — nothing to index.
        if buffer.language().is_none() {
            return;
        }

        // Only buffers backed by a project entry are indexed.
        let Some(project_entry_id) =
            project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
        else {
            return;
        };
        let buffer_id = buffer.remote_id();

        let mut parse_status = buffer.parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer_entity.downgrade();
            async move |_, cx| {
                // Wait until the syntax tree is up to date before snapshotting.
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let state = Arc::downgrade(&self.state);
        let task = cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok(snapshot) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.text.as_rope();

            // Pair each declaration with its parent's index in outline order,
            // so parent links can be rebuilt after insertion below.
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| {
                    (
                        item.parent_index,
                        BufferDeclaration::from_outline(item, &rope),
                    )
                })
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let buffer_state = state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default);

            // Drop the previous declarations before inserting the new set.
            SyntaxIndexState::remove_buffer_declarations(
                &buffer_state.declarations,
                &mut state.declarations,
                &mut state.identifiers,
            );

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                // `parent_index` refers to an earlier item in the list, whose
                // new id is already in `new_ids`; debug-panics otherwise.
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::Buffer {
                    rope: rope.clone(),
                    buffer_id,
                    declaration,
                    project_entry_id,
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }

            buffer_state.declarations = new_ids;
        });

        // Store the task so a newer update replaces (and cancels) this one.
        self.with_state(cx, move |state| {
            state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default)
                .task = Some(task)
        });
    }
 464
    /// Re-indexes a (not necessarily open) file: loads it from the worktree
    /// into a temporary local buffer, parses it, and replaces the file's
    /// declarations in the index. Returns a task that completes when done, or
    /// a ready task if the file cannot be indexed (no project, no language
    /// with an outline config, or no worktree).
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        let Some(project) = self.project.upgrade() else {
            return Task::ready(());
        };
        let project = project.read(cx);

        let language_registry = project.languages();
        let Some(available_language) =
            language_registry.language_for_file_path(project_path.path.as_std_path())
        else {
            return Task::ready(());
        };
        // Fast path: if the language is already loaded, bail out before any
        // file I/O when it cannot produce outlines at all.
        let language = if let Some(Ok(Ok(language))) = language_registry
            .load_language(&available_language)
            .now_or_never()
        {
            if language
                .grammar()
                .is_none_or(|grammar| grammar.outline_config.is_none())
            {
                return Task::ready(());
            }
            future::Either::Left(async { Ok(language) })
        } else {
            // Slow path: load the language asynchronously alongside the file.
            let language_registry = language_registry.clone();
            future::Either::Right(async move {
                anyhow::Ok(
                    language_registry
                        .load_language(&available_language)
                        .await??,
                )
            })
        };

        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(());
        };

        // Load the file text and parse it in a throwaway local buffer.
        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language.await?;

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(Some(language), cx);
                    buffer
                })?;

                // Wait for the syntax tree before taking a snapshot.
                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            })
        });

        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok(snapshot) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.as_rope();
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            // Remove the file's previous declarations from both the slotmap
            // and the identifier index.
            let file_state = state.files.entry(entry_id).or_insert_with(Default::default);
            for old_declaration_id in &file_state.declarations {
                let Some(declaration) = state.declarations.remove(*old_declaration_id) else {
                    debug_panic!("declaration not found");
                    continue;
                };
                if let Some(identifier_declarations) =
                    state.identifiers.get_mut(declaration.identifier())
                {
                    identifier_declarations.remove(old_declaration_id);
                }
            }

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                // `parent_index` refers to an earlier item in the list, whose
                // new id is already in `new_ids`; debug-panics otherwise.
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::File {
                    project_entry_id: entry_id,
                    declaration,
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }
            file_state.declarations = new_ids;
        })
    }
 582}
 583
 584impl SyntaxIndexState {
    /// Looks up a declaration by its id.
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }
 588
    /// Returns declarations for the identifier. If the limit is exceeded, returns an empty vector.
    ///
    /// Buffer declarations shadow file declarations: a file declaration for a
    /// project entry is skipped when a buffer declaration for the same entry
    /// is included.
    ///
    /// TODO: Consider doing some pre-ranking and instead truncating when N is exceeded.
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
    ) -> Vec<(DeclarationId, &Declaration)> {
        // make sure to not have a large stack allocation
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(&identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        // File declarations are deferred so buffer declarations encountered
        // later in the iteration can still shadow them.
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer {
                    project_entry_id, ..
                } => {
                    included_buffer_entry_ids.push(*project_entry_id);
                    result.push((*declaration_id, declaration));
                    // Limit exceeded: per the contract, return nothing.
                    if result.len() == N {
                        return Vec::new();
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        file_declarations.push((*declaration_id, declaration));
                    }
                }
            }
        }

        for (declaration_id, declaration) in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    // Re-check: the entry may have gained a shadowing buffer
                    // declaration after this file declaration was deferred.
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        result.push((declaration_id, declaration));

                        if result.len() == N {
                            return Vec::new();
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }
 651
 652    pub fn buffer_declarations_containing_range(
 653        &self,
 654        buffer_id: BufferId,
 655        range: Range<usize>,
 656    ) -> impl Iterator<Item = (DeclarationId, &BufferDeclaration)> {
 657        let Some(buffer_state) = self.buffers.get(&buffer_id) else {
 658            return itertools::Either::Left(iter::empty());
 659        };
 660
 661        let iter = buffer_state
 662            .declarations
 663            .iter()
 664            .filter_map(move |declaration_id| {
 665                let Some(declaration) = self
 666                    .declarations
 667                    .get(*declaration_id)
 668                    .and_then(|d| d.as_buffer())
 669                else {
 670                    log::error!("bug: missing buffer outline declaration");
 671                    return None;
 672                };
 673                if declaration.item_range.contains_inclusive(&range) {
 674                    return Some((*declaration_id, declaration));
 675                }
 676                return None;
 677            });
 678        itertools::Either::Right(iter)
 679    }
 680
 681    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
 682        match declaration {
 683            Declaration::File {
 684                project_entry_id, ..
 685            } => self
 686                .files
 687                .get(project_entry_id)
 688                .map(|file_state| file_state.declarations.len())
 689                .unwrap_or_default(),
 690            Declaration::Buffer { buffer_id, .. } => self
 691                .buffers
 692                .get(buffer_id)
 693                .map(|buffer_state| buffer_state.declarations.len())
 694                .unwrap_or_default(),
 695        }
 696    }
 697
 698    fn remove_buffer_declarations(
 699        old_declaration_ids: &[DeclarationId],
 700        declarations: &mut SlotMap<DeclarationId, Declaration>,
 701        identifiers: &mut HashMap<Identifier, HashSet<DeclarationId>>,
 702    ) {
 703        for old_declaration_id in old_declaration_ids {
 704            let Some(declaration) = declarations.remove(*old_declaration_id) else {
 705                debug_panic!("declaration not found");
 706                continue;
 707            };
 708            if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) {
 709                identifier_declarations.remove(old_declaration_id);
 710            }
 711        }
 712    }
 713}
 714
 715#[cfg(test)]
 716mod tests {
 717    use super::*;
 718    use std::sync::Arc;
 719
 720    use gpui::TestAppContext;
 721    use indoc::indoc;
 722    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
 723    use project::{FakeFs, Project};
 724    use serde_json::json;
 725    use settings::SettingsStore;
 726    use text::OffsetRangeExt as _;
 727    use util::{path, rel_path::rel_path};
 728
 729    use crate::syntax_index::SyntaxIndex;
 730
    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        // Files indexed without being opened should yield `File` declarations.
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            // `main` is declared in both a.rs and c.rs of the test fixture.
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("a.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range, 0..98);

            let decl = expect_file_decl("c.rs", &decls[1].1, &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range, 32..280);
        });
    }
 754
    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        // Parent links of file declarations should point at the enclosing
        // declaration (here: a test fn inside a `tests` module).
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            // Resolve and verify the parent declaration, which has no parent.
            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }
 785
    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        // Opening a buffer should replace the file declarations with buffer
        // declarations, preserving parent links.
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        // Let the index observe the open buffer and finish re-indexing.
        cx.run_until_parked();

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        // Keep the buffer alive until all assertions have run.
        drop(buffer);
    }
 828
 829    #[gpui::test]
 830    async fn test_declarations_limt(cx: &mut TestAppContext) {
 831        let (_, index, rust_lang_id) = init_test(cx).await;
 832
 833        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
 834        let index_state = index_state.lock().await;
 835        let decls = index_state.declarations_for_identifier::<1>(&Identifier {
 836            name: "main".into(),
 837            language_id: rust_lang_id,
 838        });
 839        assert_eq!(decls.len(), 0);
 840    }
 841
 842    #[gpui::test]
 843    async fn test_buffer_shadow(cx: &mut TestAppContext) {
 844        let (project, index, rust_lang_id) = init_test(cx).await;
 845
 846        let main = Identifier {
 847            name: "main".into(),
 848            language_id: rust_lang_id,
 849        };
 850
 851        let buffer = project
 852            .update(cx, |project, cx| {
 853                let project_path = project.find_project_path("c.rs", cx).unwrap();
 854                project.open_buffer(project_path, cx)
 855            })
 856            .await
 857            .unwrap();
 858
 859        cx.run_until_parked();
 860
 861        let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone());
 862        {
 863            let index_state = index_state_arc.lock().await;
 864
 865            cx.update(|cx| {
 866                let decls = index_state.declarations_for_identifier::<8>(&main);
 867                assert_eq!(decls.len(), 2);
 868                let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
 869                assert_eq!(decl.identifier, main);
 870                assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..280);
 871
 872                expect_file_decl("a.rs", &decls[1].1, &project, cx);
 873            });
 874        }
 875
 876        // Drop the buffer and wait for release
 877        cx.update(|_| {
 878            drop(buffer);
 879        });
 880        cx.run_until_parked();
 881
 882        let index_state = index_state_arc.lock().await;
 883
 884        cx.update(|cx| {
 885            let decls = index_state.declarations_for_identifier::<8>(&main);
 886            assert_eq!(decls.len(), 2);
 887            expect_file_decl("a.rs", &decls[0].1, &project, cx);
 888            expect_file_decl("c.rs", &decls[1].1, &project, cx);
 889        });
 890    }
 891
 892    fn expect_buffer_decl<'a>(
 893        path: &str,
 894        declaration: &'a Declaration,
 895        project: &Entity<Project>,
 896        cx: &App,
 897    ) -> &'a BufferDeclaration {
 898        if let Declaration::Buffer {
 899            declaration,
 900            project_entry_id,
 901            ..
 902        } = declaration
 903        {
 904            let project_path = project
 905                .read(cx)
 906                .path_for_entry(*project_entry_id, cx)
 907                .unwrap();
 908            assert_eq!(project_path.path.as_ref(), rel_path(path),);
 909            declaration
 910        } else {
 911            panic!("Expected a buffer declaration, found {:?}", declaration);
 912        }
 913    }
 914
 915    fn expect_file_decl<'a>(
 916        path: &str,
 917        declaration: &'a Declaration,
 918        project: &Entity<Project>,
 919        cx: &App,
 920    ) -> &'a FileDeclaration {
 921        if let Declaration::File {
 922            declaration,
 923            project_entry_id: file,
 924        } = declaration
 925        {
 926            assert_eq!(
 927                project
 928                    .read(cx)
 929                    .path_for_entry(*file, cx)
 930                    .unwrap()
 931                    .path
 932                    .as_ref(),
 933                rel_path(path),
 934            );
 935            declaration
 936        } else {
 937            panic!("Expected a file declaration, found {:?}", declaration);
 938        }
 939    }
 940
    /// Builds the shared fixture: a fake three-file Rust project with a
    /// `SyntaxIndex` attached. Returns the project, the index entity, and the
    /// id of the registered Rust language.
    ///
    /// NOTE(review): sibling tests assert byte offsets (e.g. `0..98`, `32..280`)
    /// into these exact file contents — do not reformat the fixture strings.
    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<SyntaxIndex>, LanguageId) {
        // Settings and language/project globals must be initialized before
        // constructing the project below.
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        // a.rs and c.rs both declare `main`; c.rs additionally nests
        // `test_process_data` inside `mod tests` (used by the parent tests).
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        // Register Rust so the `.rs` files are matched and indexed with the
        // outline query from `rust_lang()`.
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let file_indexing_parallelism = 2;
        let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx));
        // Let the initial indexing pass finish before tests make assertions.
        cx.run_until_parked();

        (project, index, lang_id)
    }
1027
1028    fn rust_lang() -> Language {
1029        Language::new(
1030            LanguageConfig {
1031                name: "Rust".into(),
1032                matcher: LanguageMatcher {
1033                    path_suffixes: vec!["rs".to_string()],
1034                    ..Default::default()
1035                },
1036                ..Default::default()
1037            },
1038            Some(tree_sitter_rust::LANGUAGE.into()),
1039        )
1040        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
1041        .unwrap()
1042    }
1043}