use anyhow::{Result, anyhow};
use collections::{HashMap, HashSet};
use futures::channel::mpsc;
use futures::lock::Mutex;
use futures::{FutureExt as _, StreamExt, future};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
use itertools::Itertools;

use language::{Buffer, BufferEvent};
use postage::stream::Stream as _;
use project::buffer_store::{BufferStore, BufferStoreEvent};
use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use project::{PathChange, Project, ProjectEntryId, ProjectPath};
use slotmap::SlotMap;
use std::iter;
use std::ops::{DerefMut, Range};
use std::sync::Arc;
use text::BufferId;
use util::{RangeExt as _, debug_panic, some_or_debug_panic};

use crate::CachedDeclarationPath;
use crate::declaration::{
    BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
};
use crate::outline::declarations_in_buffer;

// TODO
//
// * Also queue / debounce buffer changes. A challenge for this is that use of
// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
//
// * Add a per-language configuration for skipping indexing.
//
// * Handle tsx / ts / js referencing each other.

// Potential future improvements:
//
// * Prevent indexing of a large file from blocking the queue.
//
// * Send multiple selected excerpt ranges. A challenge is that excerpt ranges influence which
// references are present and their scores.
//
// * Include single-file worktrees / non-visible worktrees? E.g. a go-to-definition that resolves
// to a file in a build dependency. The file should not be editable in that case - but how to
// distinguish it from the case where it should be editable?

// Potential future optimizations:
//
// * Index files on multiple threads in Zed (currently this is only parallel in the CLI). Adding
// some kind of priority system to the background executor could help - indexing is single-threaded
// for now to avoid interfering with other work.
//
// * Parse files directly instead of loading them into a Rope.
//
//   - This would allow the task handling dirty_files to be done entirely on the background
//   executor.
//
//   - Make SyntaxMap generic to handle embedded languages? It will also need to find line
//   boundaries, but that can be done by scanning characters in the flat representation.
//
// * Use something similar to slotmap without key versions.
//
// * Concurrent slotmap.

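/// Index of declarations in project files and open buffers, looked up by identifier. Declarations
/// from open buffers shadow declarations from the corresponding on-disk files.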
pub struct SyntaxIndex {
    state: Arc<Mutex<SyntaxIndexState>>,
    project: WeakEntity<Project>,
    initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
    _file_indexing_task: Option<Task<()>>,
}

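/// State shared between the [`SyntaxIndex`] and its background tasks: the declarations themselves,
/// an identifier lookup table, per-file and per-buffer declaration lists, and the set of files
/// awaiting (re)indexing.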
pub struct SyntaxIndexState {
    declarations: SlotMap<DeclarationId, Declaration>,
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    files: HashMap<ProjectEntryId, FileState>,
    buffers: HashMap<BufferId, BufferState>,
    dirty_files: HashMap<ProjectEntryId, ProjectPath>,
    dirty_files_tx: mpsc::Sender<()>,
}

#[derive(Debug, Default)]
struct FileState {
    declarations: Vec<DeclarationId>,
}

#[derive(Default)]
struct BufferState {
    declarations: Vec<DeclarationId>,
    task: Option<Task<()>>,
}

impl SyntaxIndex {
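    /// Creates the index and spawns a background task that indexes the files of all worktrees in
    /// the project, then watches for dirty files to re-index.
    ///
    /// A minimal construction sketch, mirroring `init_test` in this module's tests:
    ///
    /// ```ignore
    /// let index = cx.new(|cx| SyntaxIndex::new(&project, /* file_indexing_parallelism */ 2, cx));
    /// ```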
    pub fn new(
        project: &Entity<Project>,
        file_indexing_parallelism: usize,
        cx: &mut Context<Self>,
    ) -> Self {
        assert!(file_indexing_parallelism > 0);
        let (dirty_files_tx, mut dirty_files_rx) = mpsc::channel::<()>(1);
        let (mut initial_file_indexing_done_tx, initial_file_indexing_done_rx) =
            postage::watch::channel();

        let initial_state = SyntaxIndexState {
            declarations: SlotMap::default(),
            identifiers: HashMap::default(),
            files: HashMap::default(),
            buffers: HashMap::default(),
            dirty_files: HashMap::default(),
            dirty_files_tx,
        };
        let mut this = Self {
            project: project.downgrade(),
            state: Arc::new(Mutex::new(initial_state)),
            initial_file_indexing_done_rx,
            _file_indexing_task: None,
        };

        let worktree_store = project.read(cx).worktree_store();
        let initial_worktree_snapshots = worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>();
        this._file_indexing_task = Some(cx.spawn(async move |this, cx| {
            let snapshots_file_count = initial_worktree_snapshots
                .iter()
                .map(|worktree| worktree.file_count())
                .sum::<usize>();
            if snapshots_file_count > 0 {
                let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
                let chunk_count = snapshots_file_count.div_ceil(chunk_size);
                let file_chunks = initial_worktree_snapshots
                    .iter()
                    .flat_map(|worktree| {
                        let worktree_id = worktree.id();
                        worktree.files(false, 0).map(move |entry| {
                            (
                                entry.id,
                                ProjectPath {
                                    worktree_id,
                                    path: entry.path.clone(),
                                },
                            )
                        })
                    })
                    .chunks(chunk_size);

                let mut tasks = Vec::with_capacity(chunk_count);
                for chunk in file_chunks.into_iter() {
                    tasks.push(Self::update_dirty_files(
                        &this,
                        chunk.into_iter().collect(),
                        cx.clone(),
                    ));
                }
                futures::future::join_all(tasks).await;
                log::info!("Finished initial file indexing");
            }

            *initial_file_indexing_done_tx.borrow_mut() = true;

            let Ok(state) = this.read_with(cx, |this, _cx| Arc::downgrade(&this.state)) else {
                return;
            };
            while dirty_files_rx.next().await.is_some() {
                let Some(state) = state.upgrade() else {
                    return;
                };
                let mut state = state.lock().await;
                // Shrink the map when it is much emptier than its capacity, to avoid holding
                // onto memory after large bursts of changes.
                let was_underused = state.dirty_files.capacity() > 255
                    && state.dirty_files.len() * 8 < state.dirty_files.capacity();
                let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
                if was_underused {
                    state.dirty_files.shrink_to_fit();
                }
                drop(state);
                if dirty_files.is_empty() {
                    continue;
                }

                let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
                let chunk_count = dirty_files.len().div_ceil(chunk_size);
                let mut tasks = Vec::with_capacity(chunk_count);
                let chunks = dirty_files.into_iter().chunks(chunk_size);
                for chunk in chunks.into_iter() {
                    tasks.push(Self::update_dirty_files(
                        &this,
                        chunk.into_iter().collect(),
                        cx.clone(),
                    ));
                }
                futures::future::join_all(tasks).await;
            }
        }));

        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }

    async fn update_dirty_files(
        this: &WeakEntity<Self>,
        dirty_files: Vec<(ProjectEntryId, ProjectPath)>,
        mut cx: AsyncApp,
    ) {
        for (entry_id, project_path) in dirty_files {
            let Ok(task) = this.update(&mut cx, |this, cx| {
                this.update_file(entry_id, project_path, cx)
            }) else {
                return;
            };
            task.await;
        }
    }

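    /// Returns a task that resolves once initial file indexing has completed, or fails if the
    /// index is dropped first.
    ///
    /// A usage sketch (assuming an `index: Entity<SyntaxIndex>`):
    ///
    /// ```ignore
    /// let done = index.read(cx).wait_for_initial_file_indexing(cx);
    /// done.await?;
    /// ```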
    pub fn wait_for_initial_file_indexing(&self, cx: &App) -> Task<Result<()>> {
        if *self.initial_file_indexing_done_rx.borrow() {
            Task::ready(Ok(()))
        } else {
            let mut rx = self.initial_file_indexing_done_rx.clone();
            cx.background_spawn(async move {
                loop {
                    match rx.recv().await {
                        Some(true) => return Ok(()),
                        Some(false) => {}
                        None => {
                            return Err(anyhow!(
                                "SyntaxIndex dropped while waiting for initial file indexing"
                            ));
                        }
                    }
                }
            })
        }
    }

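    /// Returns the project paths of all files currently present in the index. Open buffers are
    /// tracked separately and are not included.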
    pub fn indexed_file_paths(&self, cx: &App) -> Task<Vec<ProjectPath>> {
        let state = self.state.clone();
        let project = self.project.clone();

        cx.spawn(async move |cx| {
            let state = state.lock().await;
            let Some(project) = project.upgrade() else {
                return vec![];
            };
            project
                .read_with(cx, |project, cx| {
                    state
                        .files
                        .keys()
                        .filter_map(|entry_id| project.path_for_entry(*entry_id, cx))
                        .collect()
                })
                .unwrap_or_default()
        })
    }

    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                let state = Arc::downgrade(&self.state);
                let worktree_id = *worktree_id;
                let updated_entries_set = updated_entries_set.clone();
                cx.background_spawn(async move {
                    let Some(state) = state.upgrade() else { return };
                    let mut state = state.lock().await;
                    for (path, entry_id, path_change) in updated_entries_set.iter() {
                        if let PathChange::Removed = path_change {
                            state.files.remove(entry_id);
                            state.dirty_files.remove(entry_id);
                        } else {
                            let project_path = ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            };
                            state.dirty_files.insert(*entry_id, project_path);
                        }
                    }
                    match state.dirty_files_tx.try_send(()) {
                        Err(err) if err.is_disconnected() => {
                            log::error!("bug: syntax indexing queue is disconnected");
                        }
                        _ => {}
                    }
                })
                .detach();
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                let project_entry_id = *project_entry_id;
                self.with_state(cx, move |state| {
                    state.files.remove(&project_entry_id);
                })
            }
            _ => {}
        }
    }

    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

    pub const fn state(&self) -> &Arc<Mutex<SyntaxIndexState>> {
        &self.state
    }

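    /// Runs `f` with mutable access to the index state: synchronously if the lock is free, and
    /// otherwise on the background executor.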
    fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) {
        if let Some(mut state) = self.state.try_lock() {
            f(&mut state);
            return;
        }
        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            f(&mut state)
        })
        .detach();
    }

    fn register_buffer(&self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer_id = buffer.read(cx).remote_id();
        cx.observe_release(buffer, move |this, _buffer, cx| {
            this.with_state(cx, move |state| {
                if let Some(buffer_state) = state.buffers.remove(&buffer_id) {
                    SyntaxIndexState::remove_buffer_declarations(
                        &buffer_state.declarations,
                        &mut state.declarations,
                        &mut state.identifiers,
                    );
                }
            })
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();

        self.update_buffer(buffer.clone(), cx);
    }

    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited |
            // Paths are cached, so they should be updated when the file handle changes.
            BufferEvent::FileHandleChanged => self.update_buffer(buffer, cx),
            _ => {}
        }
    }

    fn update_buffer(&self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer = buffer_entity.read(cx);
        if buffer.language().is_none() {
            return;
        }

        let Some((project_entry_id, cached_path)) = project::File::from_dyn(buffer.file())
            .and_then(|f| {
                let project_entry_id = f.project_entry_id()?;
                let cached_path = CachedDeclarationPath::new(
                    f.worktree.read(cx).abs_path(),
                    &f.path,
                    buffer.language(),
                );
                Some((project_entry_id, cached_path))
            })
        else {
            return;
        };
        let buffer_id = buffer.remote_id();

        let mut parse_status = buffer.parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer_entity.downgrade();
            async move |_, cx| {
                // Wait for parsing to settle before snapshotting the buffer.
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let state = Arc::downgrade(&self.state);
        let task = cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok(snapshot) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.text.as_rope();

            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| {
                    (
                        item.parent_index,
                        BufferDeclaration::from_outline(item, &rope),
                    )
                })
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let buffer_state = state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default);

            SyntaxIndexState::remove_buffer_declarations(
                &buffer_state.declarations,
                &mut state.declarations,
                &mut state.identifiers,
            );

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::Buffer {
                    rope: rope.clone(),
                    buffer_id,
                    declaration,
                    project_entry_id,
                    cached_path: cached_path.clone(),
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }

            buffer_state.declarations = new_ids;
        });

        self.with_state(cx, move |state| {
            state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default)
                .task = Some(task)
        });
    }

    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        let Some(project) = self.project.upgrade() else {
            return Task::ready(());
        };
        let project = project.read(cx);

        let language_registry = project.languages();
        let Some(available_language) =
            language_registry.language_for_file_path(project_path.path.as_std_path())
        else {
            return Task::ready(());
        };
        let language = if let Some(Ok(Ok(language))) = language_registry
            .load_language(&available_language)
            .now_or_never()
        {
            if language
                .grammar()
                .is_none_or(|grammar| grammar.outline_config.is_none())
            {
                return Task::ready(());
            }
            future::Either::Left(async { Ok(language) })
        } else {
            let language_registry = language_registry.clone();
            future::Either::Right(async move {
                anyhow::Ok(
                    language_registry
                        .load_language(&available_language)
                        .await??,
                )
            })
        };

        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(());
        };

        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            let worktree_abs_path = worktree.abs_path();
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language.await?;

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(Some(language.clone()), cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                let cached_path = CachedDeclarationPath::new(
                    worktree_abs_path,
                    &project_path.path,
                    Some(&language),
                );

                let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;

                anyhow::Ok((snapshot, cached_path))
            })
        });

        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok((snapshot, cached_path)) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.as_rope();
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let file_state = state.files.entry(entry_id).or_insert_with(Default::default);
            for old_declaration_id in &file_state.declarations {
                let Some(declaration) = state.declarations.remove(*old_declaration_id) else {
                    debug_panic!("declaration not found");
                    continue;
                };
                if let Some(identifier_declarations) =
                    state.identifiers.get_mut(declaration.identifier())
                {
                    identifier_declarations.remove(old_declaration_id);
                }
            }

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::File {
                    project_entry_id: entry_id,
                    declaration,
                    cached_path: cached_path.clone(),
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }
            file_state.declarations = new_ids;
        })
    }
}

impl SyntaxIndexState {
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }

    /// Returns up to `N` declarations for the identifier. If more than `N` declarations match,
    /// returns an empty vector. Declarations from open buffers shadow file declarations for the
    /// same project entry.
    ///
    /// TODO: Consider doing some pre-ranking and instead truncating when N is exceeded.
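    ///
    /// A lookup sketch, as used by this module's tests:
    ///
    /// ```ignore
    /// let decls = index_state.declarations_for_identifier::<8>(&identifier);
    /// ```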
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
    ) -> Vec<(DeclarationId, &Declaration)> {
        // Make sure not to create a large stack allocation.
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer {
                    project_entry_id, ..
                } => {
                    included_buffer_entry_ids.push(*project_entry_id);
                    result.push((*declaration_id, declaration));
                    if result.len() == N {
                        return Vec::new();
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    // Defer file declarations so that buffer declarations for the same project
                    // entry can shadow them.
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        file_declarations.push((*declaration_id, declaration));
                    }
                }
            }
        }

        for (declaration_id, declaration) in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        result.push((declaration_id, declaration));

                        if result.len() == N {
                            return Vec::new();
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }

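    /// Returns the declarations of the given buffer whose item range contains `range`
    /// (inclusively). Assumes the buffer's entries in the index are up to date (see the TODO at
    /// the top of this file).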
    pub fn buffer_declarations_containing_range(
        &self,
        buffer_id: BufferId,
        range: Range<usize>,
    ) -> impl Iterator<Item = (DeclarationId, &BufferDeclaration)> {
        let Some(buffer_state) = self.buffers.get(&buffer_id) else {
            return itertools::Either::Left(iter::empty());
        };

        let iter = buffer_state
            .declarations
            .iter()
            .filter_map(move |declaration_id| {
                let Some(declaration) = self
                    .declarations
                    .get(*declaration_id)
                    .and_then(|d| d.as_buffer())
                else {
                    log::error!("bug: missing buffer outline declaration");
                    return None;
                };
                if declaration.item_range.contains_inclusive(&range) {
                    Some((*declaration_id, declaration))
                } else {
                    None
                }
            });
        itertools::Either::Right(iter)
    }

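    /// Returns the number of declarations indexed for the file or buffer that contains the given
    /// declaration.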
    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
        match declaration {
            Declaration::File {
                project_entry_id, ..
            } => self
                .files
                .get(project_entry_id)
                .map(|file_state| file_state.declarations.len())
                .unwrap_or_default(),
            Declaration::Buffer { buffer_id, .. } => self
                .buffers
                .get(buffer_id)
                .map(|buffer_state| buffer_state.declarations.len())
                .unwrap_or_default(),
        }
    }

    fn remove_buffer_declarations(
        old_declaration_ids: &[DeclarationId],
        declarations: &mut SlotMap<DeclarationId, Declaration>,
        identifiers: &mut HashMap<Identifier, HashSet<DeclarationId>>,
    ) {
        for old_declaration_id in old_declaration_ids {
            let Some(declaration) = declarations.remove(*old_declaration_id) else {
                debug_panic!("declaration not found");
                continue;
            };
            if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) {
                identifier_declarations.remove(old_declaration_id);
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::Arc;

    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project};
    use serde_json::json;
    use settings::SettingsStore;
    use text::OffsetRangeExt as _;
    use util::{path, rel_path::rel_path};

    use crate::syntax_index::SyntaxIndex;

    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("a.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range, 0..98);

            let decl = expect_file_decl("c.rs", &decls[1].1, &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range, 32..280);
        });
    }

    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    #[gpui::test]
    async fn test_declarations_limit(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        // Two declarations of `main` exist (in `a.rs` and `c.rs`), so a limit of 1 is exceeded
        // and an empty vector is returned.
        let decls = index_state.declarations_for_identifier::<1>(&Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        });
        assert_eq!(decls.len(), 0);
    }

    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone());
        {
            let index_state = index_state_arc.lock().await;

            cx.update(|cx| {
                let decls = index_state.declarations_for_identifier::<8>(&main);
                assert_eq!(decls.len(), 2);
                let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
                assert_eq!(decl.identifier, main);
                assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..280);

                expect_file_decl("a.rs", &decls[1].1, &project, cx);
            });
        }

        // Drop the buffer and wait for its release.
        cx.update(|_| {
            drop(buffer);
        });
        cx.run_until_parked();

        let index_state = index_state_arc.lock().await;

        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);
            expect_file_decl("a.rs", &decls[0].1, &project, cx);
            expect_file_decl("c.rs", &decls[1].1, &project, cx);
        });
    }

    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            project_entry_id,
            ..
        } = declaration
        {
            let project_path = project
                .read(cx)
                .path_for_entry(*project_entry_id, cx)
                .unwrap();
            assert_eq!(project_path.path.as_ref(), rel_path(path));
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
            ..
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                rel_path(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<SyntaxIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let file_indexing_parallelism = 2;
        let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx));
        cx.run_until_parked();

        (project, index, lang_id)
    }

    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}