use anyhow::{Result, anyhow};
use collections::{HashMap, HashSet};
use futures::channel::mpsc;
use futures::lock::Mutex;
use futures::{FutureExt as _, StreamExt, future};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
use itertools::Itertools;

use language::{Buffer, BufferEvent};
use postage::stream::Stream as _;
use project::buffer_store::{BufferStore, BufferStoreEvent};
use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use project::{PathChange, Project, ProjectEntryId, ProjectPath};
use slotmap::SlotMap;
use std::iter;
use std::ops::{DerefMut, Range};
use std::sync::Arc;
use text::BufferId;
use util::{RangeExt as _, debug_panic, some_or_debug_panic};

use crate::CachedDeclarationPath;
use crate::declaration::{
    BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
};
use crate::outline::declarations_in_buffer;
// TODO
//
// * Also queue / debounce buffer changes. A challenge for this is that use of
// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
//
// * Add a per-language configuration for skipping indexing.
//
// * Handle tsx / ts / js referencing each other.

// Potential future improvements:
//
// * Prevent indexing of a large file from blocking the queue.
//
// * Send multiple selected excerpt ranges. The challenge is that excerpt ranges influence which
// references are present and their scores.
//
// * Include single-file worktrees / non-visible worktrees? E.g. a go-to-definition that resolves
// to a file in a build dependency. It should not be editable in that case - but how to distinguish
// the case where it should be editable?

// Potential future optimizations:
//
// * Index files on multiple threads in Zed (currently only parallel for the CLI). Adding some kind
// of priority system to the background executor could help - it's single-threaded for now to avoid
// interfering with other work.
//
// * Parse files directly instead of loading them into a Rope.
//
// - This would allow the task handling dirty_files to be done entirely on the background executor.
//
// - Make SyntaxMap generic to handle embedded languages? It will also need to find line
// boundaries, but that can be done by scanning characters in the flat representation.
//
// * Use something similar to slotmap without key versions.
//
// * Concurrent slotmap.

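/// An index of the declarations in a project's files and open buffers, built from syntax tree
/// outlines. Files are indexed in the background and re-indexed as they change, while open
/// buffers are re-indexed on edit so that their declarations shadow the on-disk versions.
///
/// A minimal construction sketch, mirroring the tests below (`project` and `cx` assumed to be in
/// scope):
///
/// ```ignore
/// let index = cx.new(|cx| SyntaxIndex::new(&project, /* file_indexing_parallelism */ 2, cx));
/// ```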
pub struct SyntaxIndex {
    state: Arc<Mutex<SyntaxIndexState>>,
    project: WeakEntity<Project>,
    initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
    _file_indexing_task: Option<Task<()>>,
}

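/// The index's data, kept behind an async mutex so that background indexing tasks and readers on
/// other threads can both access it.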
pub struct SyntaxIndexState {
    declarations: SlotMap<DeclarationId, Declaration>,
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    files: HashMap<ProjectEntryId, FileState>,
    buffers: HashMap<BufferId, BufferState>,
    dirty_files: HashMap<ProjectEntryId, ProjectPath>,
    dirty_files_tx: mpsc::Sender<()>,
}

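/// Declarations currently indexed from a file on disk.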
#[derive(Debug, Default)]
struct FileState {
    declarations: Vec<DeclarationId>,
}

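/// Declarations currently indexed from an open buffer, plus the in-flight indexing task.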
#[derive(Default)]
struct BufferState {
    declarations: Vec<DeclarationId>,
    task: Option<Task<()>>,
}

impl SyntaxIndex {
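    /// Creates the index and spawns a background task that indexes all files in the project's
    /// worktrees, then keeps the index up to date as files change.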
    pub fn new(
        project: &Entity<Project>,
        file_indexing_parallelism: usize,
        cx: &mut Context<Self>,
    ) -> Self {
        assert!(file_indexing_parallelism > 0);
        let (dirty_files_tx, mut dirty_files_rx) = mpsc::channel::<()>(1);
        let (mut initial_file_indexing_done_tx, initial_file_indexing_done_rx) =
            postage::watch::channel();

        let initial_state = SyntaxIndexState {
            declarations: SlotMap::default(),
            identifiers: HashMap::default(),
            files: HashMap::default(),
            buffers: HashMap::default(),
            dirty_files: HashMap::default(),
            dirty_files_tx,
        };
        let mut this = Self {
            project: project.downgrade(),
            state: Arc::new(Mutex::new(initial_state)),
            initial_file_indexing_done_rx,
            _file_indexing_task: None,
        };

        let worktree_store = project.read(cx).worktree_store();
        let initial_worktree_snapshots = worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>();
        this._file_indexing_task = Some(cx.spawn(async move |this, cx| {
            let snapshots_file_count = initial_worktree_snapshots
                .iter()
                .map(|worktree| worktree.file_count())
                .sum::<usize>();
            if snapshots_file_count > 0 {
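                // Split the initial file list into `file_indexing_parallelism` roughly equal
                // chunks, each indexed sequentially by its own task.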
                let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
                let chunk_count = snapshots_file_count.div_ceil(chunk_size);
                let file_chunks = initial_worktree_snapshots
                    .iter()
                    .flat_map(|worktree| {
                        let worktree_id = worktree.id();
                        worktree.files(false, 0).map(move |entry| {
                            (
                                entry.id,
                                ProjectPath {
                                    worktree_id,
                                    path: entry.path.clone(),
                                },
                            )
                        })
                    })
                    .chunks(chunk_size);

                let mut tasks = Vec::with_capacity(chunk_count);
                for chunk in file_chunks.into_iter() {
                    tasks.push(Self::update_dirty_files(
                        &this,
                        chunk.into_iter().collect(),
                        cx.clone(),
                    ));
                }
                futures::future::join_all(tasks).await;
                log::info!("Finished initial file indexing");
            }

            *initial_file_indexing_done_tx.borrow_mut() = true;

            let Ok(state) = this.read_with(cx, |this, _cx| Arc::downgrade(&this.state)) else {
                return;
            };
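            // Each signal on this channel drains the accumulated dirty files and re-indexes them
            // in parallel chunks, shrinking the map when most of its capacity is unused.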
            while dirty_files_rx.next().await.is_some() {
                let Some(state) = state.upgrade() else {
                    return;
                };
                let mut state = state.lock().await;
                let was_underused = state.dirty_files.capacity() > 255
                    && state.dirty_files.len() * 8 < state.dirty_files.capacity();
                let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
                if was_underused {
                    state.dirty_files.shrink_to_fit();
                }
                drop(state);
                if dirty_files.is_empty() {
                    continue;
                }

                let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
                let chunk_count = dirty_files.len().div_ceil(chunk_size);
                let mut tasks = Vec::with_capacity(chunk_count);
                let chunks = dirty_files.into_iter().chunks(chunk_size);
                for chunk in chunks.into_iter() {
                    tasks.push(Self::update_dirty_files(
                        &this,
                        chunk.into_iter().collect(),
                        cx.clone(),
                    ));
                }
                futures::future::join_all(tasks).await;
            }
        }));

        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }

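    /// Sequentially re-indexes the given files, stopping early if the index has been dropped.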
    async fn update_dirty_files(
        this: &WeakEntity<Self>,
        dirty_files: Vec<(ProjectEntryId, ProjectPath)>,
        mut cx: AsyncApp,
    ) {
        for (entry_id, project_path) in dirty_files {
            let Ok(task) = this.update(&mut cx, |this, cx| {
                this.update_file(entry_id, project_path, cx)
            }) else {
                return;
            };
            task.await;
        }
    }

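    /// Returns a task that resolves once the initial scan of the project's files has been
    /// indexed, or fails if the index is dropped first.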
    pub fn wait_for_initial_file_indexing(&self, cx: &App) -> Task<Result<()>> {
        if *self.initial_file_indexing_done_rx.borrow() {
            Task::ready(Ok(()))
        } else {
            let mut rx = self.initial_file_indexing_done_rx.clone();
            cx.background_spawn(async move {
                loop {
                    match rx.recv().await {
                        Some(true) => return Ok(()),
                        Some(false) => {}
                        None => {
                            return Err(anyhow!(
                                "SyntaxIndex dropped while waiting for initial file indexing"
                            ));
                        }
                    }
                }
            })
        }
    }

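    /// Returns the project paths of all files currently in the index.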
    pub fn indexed_file_paths(&self, cx: &App) -> Task<Vec<ProjectPath>> {
        let state = self.state.clone();
        let project = self.project.clone();

        cx.spawn(async move |cx| {
            let state = state.lock().await;
            let Some(project) = project.upgrade() else {
                return vec![];
            };
            project
                .read_with(cx, |project, cx| {
                    state
                        .files
                        .keys()
                        .filter_map(|entry_id| project.path_for_entry(*entry_id, cx))
                        .collect()
                })
                .unwrap_or_default()
        })
    }

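    /// Marks created and updated entries as dirty and signals the indexing loop; removed entries
    /// are dropped from the index.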
    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                let state = Arc::downgrade(&self.state);
                let worktree_id = *worktree_id;
                let updated_entries_set = updated_entries_set.clone();
                cx.background_spawn(async move {
                    let Some(state) = state.upgrade() else { return };
                    let mut state = state.lock().await;
                    for (path, entry_id, path_change) in updated_entries_set.iter() {
                        if let PathChange::Removed = path_change {
                            state.files.remove(entry_id);
                            state.dirty_files.remove(entry_id);
                        } else {
                            let project_path = ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            };
                            state.dirty_files.insert(*entry_id, project_path);
                        }
                    }
                    match state.dirty_files_tx.try_send(()) {
                        Err(err) if err.is_disconnected() => {
                            log::error!("bug: syntax indexing queue is disconnected");
                        }
                        _ => {}
                    }
                })
                .detach();
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                let project_entry_id = *project_entry_id;
                self.with_state(cx, move |state| {
                    state.files.remove(&project_entry_id);
                })
            }
            _ => {}
        }
    }

    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

    pub fn state(&self) -> &Arc<Mutex<SyntaxIndexState>> {
        &self.state
    }

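    /// Runs `f` with exclusive access to the state: synchronously when the lock is uncontended,
    /// otherwise on a background task once the lock is acquired.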
    fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) {
        if let Some(mut state) = self.state.try_lock() {
            f(&mut state);
            return;
        }
        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            f(&mut state)
        })
        .detach();
    }

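    /// Starts tracking a buffer: indexes it now, re-indexes it on relevant buffer events, and
    /// removes its declarations when the buffer is released.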
    fn register_buffer(&self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer_id = buffer.read(cx).remote_id();
        cx.observe_release(buffer, move |this, _buffer, cx| {
            this.with_state(cx, move |state| {
                if let Some(buffer_state) = state.buffers.remove(&buffer_id) {
                    SyntaxIndexState::remove_buffer_declarations(
                        &buffer_state.declarations,
                        &mut state.declarations,
                        &mut state.identifiers,
                    );
                }
            })
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();

        self.update_buffer(buffer.clone(), cx);
    }

    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited |
            // paths are cached and so should be updated
            BufferEvent::FileHandleChanged => self.update_buffer(buffer, cx),
            _ => {}
        }
    }

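    /// Re-indexes a buffer's declarations once its parse is up to date, replacing any
    /// declarations previously recorded for it.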
    fn update_buffer(&self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer = buffer_entity.read(cx);
        if buffer.language().is_none() {
            return;
        }

        let Some((project_entry_id, cached_path)) = project::File::from_dyn(buffer.file())
            .and_then(|f| {
                let project_entry_id = f.project_entry_id()?;
                let cached_path = CachedDeclarationPath::new(
                    f.worktree.read(cx).abs_path(),
                    &f.path,
                    buffer.language(),
                );
                Some((project_entry_id, cached_path))
            })
        else {
            return;
        };
        let buffer_id = buffer.remote_id();

        let mut parse_status = buffer.parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer_entity.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let state = Arc::downgrade(&self.state);
        let task = cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok(snapshot) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.text.as_rope();

            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| {
                    (
                        item.parent_index,
                        BufferDeclaration::from_outline(item, &rope),
                    )
                })
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let buffer_state = state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default);

            SyntaxIndexState::remove_buffer_declarations(
                &buffer_state.declarations,
                &mut state.declarations,
                &mut state.identifiers,
            );

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::Buffer {
                    rope: rope.clone(),
                    buffer_id,
                    declaration,
                    project_entry_id,
                    cached_path: cached_path.clone(),
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }

            buffer_state.declarations = new_ids;
        });

        self.with_state(cx, move |state| {
            state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default)
                .task = Some(task)
        });
    }

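    /// Loads a file from disk into a temporary buffer, parses it, and replaces the file's
    /// declarations in the index. Files whose language has no outline support are skipped.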
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        let Some(project) = self.project.upgrade() else {
            return Task::ready(());
        };
        let project = project.read(cx);

        let language_registry = project.languages();
        let Some(available_language) =
            language_registry.language_for_file_path(project_path.path.as_std_path())
        else {
            return Task::ready(());
        };
        let language = if let Some(Ok(Ok(language))) = language_registry
            .load_language(&available_language)
            .now_or_never()
        {
            if language
                .grammar()
                .is_none_or(|grammar| grammar.outline_config.is_none())
            {
                return Task::ready(());
            }
            future::Either::Left(async { Ok(language) })
        } else {
            let language_registry = language_registry.clone();
            future::Either::Right(async move {
                anyhow::Ok(
                    language_registry
                        .load_language(&available_language)
                        .await??,
                )
            })
        };

        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(());
        };

        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, &Default::default(), None, cx);
            let worktree_abs_path = worktree.abs_path();

            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language.await?;

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(Some(language.clone()), cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                let cached_path = CachedDeclarationPath::new(
                    worktree_abs_path,
                    &project_path.path,
                    Some(&language),
                );

                let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;

                anyhow::Ok((snapshot, cached_path))
            })
        });

        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok((snapshot, cached_path)) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.as_rope();
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let file_state = state.files.entry(entry_id).or_insert_with(Default::default);
            for old_declaration_id in &file_state.declarations {
                let Some(declaration) = state.declarations.remove(*old_declaration_id) else {
                    debug_panic!("declaration not found");
                    continue;
                };
                if let Some(identifier_declarations) =
                    state.identifiers.get_mut(declaration.identifier())
                {
                    identifier_declarations.remove(old_declaration_id);
                }
            }

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::File {
                    project_entry_id: entry_id,
                    declaration,
                    cached_path: cached_path.clone(),
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }
            file_state.declarations = new_ids;
        })
    }
}

impl SyntaxIndexState {
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }

    /// Returns up to `N` declarations for the identifier. Declarations from open buffers shadow
    /// the file declarations of the same file. If more than `N` declarations are found, an empty
    /// vector is returned.
    ///
    /// TODO: Consider doing some pre-ranking and instead truncating when N is exceeded.
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
    ) -> Vec<(DeclarationId, &Declaration)> {
        // make sure to not have a large stack allocation
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer {
                    project_entry_id, ..
                } => {
                    included_buffer_entry_ids.push(*project_entry_id);
                    result.push((*declaration_id, declaration));
                    if result.len() == N {
                        return Vec::new();
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        file_declarations.push((*declaration_id, declaration));
                    }
                }
            }
        }

        for (declaration_id, declaration) in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        result.push((declaration_id, declaration));

                        if result.len() == N {
                            return Vec::new();
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }

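    /// Iterates over the declarations in a buffer whose item range contains the given range.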
    pub fn buffer_declarations_containing_range(
        &self,
        buffer_id: BufferId,
        range: Range<usize>,
    ) -> impl Iterator<Item = (DeclarationId, &BufferDeclaration)> {
        let Some(buffer_state) = self.buffers.get(&buffer_id) else {
            return itertools::Either::Left(iter::empty());
        };

        let iter = buffer_state
            .declarations
            .iter()
            .filter_map(move |declaration_id| {
                let Some(declaration) = self
                    .declarations
                    .get(*declaration_id)
                    .and_then(|d| d.as_buffer())
                else {
                    log::error!("bug: missing buffer outline declaration");
                    return None;
                };
                if declaration.item_range.contains_inclusive(&range) {
                    Some((*declaration_id, declaration))
                } else {
                    None
                }
            });
        itertools::Either::Right(iter)
    }

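    /// Returns how many declarations are indexed for the file or buffer containing the given
    /// declaration.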
    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
        match declaration {
            Declaration::File {
                project_entry_id, ..
            } => self
                .files
                .get(project_entry_id)
                .map(|file_state| file_state.declarations.len())
                .unwrap_or_default(),
            Declaration::Buffer { buffer_id, .. } => self
                .buffers
                .get(buffer_id)
                .map(|buffer_state| buffer_state.declarations.len())
                .unwrap_or_default(),
        }
    }

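    /// Removes the given declarations from the declaration slotmap and the identifier map.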
    fn remove_buffer_declarations(
        old_declaration_ids: &[DeclarationId],
        declarations: &mut SlotMap<DeclarationId, Declaration>,
        identifiers: &mut HashMap<Identifier, HashSet<DeclarationId>>,
    ) {
        for old_declaration_id in old_declaration_ids {
            let Some(declaration) = declarations.remove(*old_declaration_id) else {
                debug_panic!("declaration not found");
                continue;
            };
            if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) {
                identifier_declarations.remove(old_declaration_id);
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::Arc;

    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project};
    use serde_json::json;
    use settings::SettingsStore;
    use text::OffsetRangeExt as _;
    use util::{path, rel_path::rel_path};

    use crate::syntax_index::SyntaxIndex;

    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("a.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range, 0..98);

            let decl = expect_file_decl("c.rs", &decls[1].1, &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range, 32..280);
        });
    }

    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    #[gpui::test]
    async fn test_declarations_limit(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        let decls = index_state.declarations_for_identifier::<1>(&Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        });
        assert_eq!(decls.len(), 0);
    }

    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone());
        {
            let index_state = index_state_arc.lock().await;

            cx.update(|cx| {
                let decls = index_state.declarations_for_identifier::<8>(&main);
                assert_eq!(decls.len(), 2);
                let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
                assert_eq!(decl.identifier, main);
                assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..280);

                expect_file_decl("a.rs", &decls[1].1, &project, cx);
            });
        }

        // Drop the buffer and wait for release
        cx.update(|_| {
            drop(buffer);
        });
        cx.run_until_parked();

        let index_state = index_state_arc.lock().await;

        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);
            expect_file_decl("a.rs", &decls[0].1, &project, cx);
            expect_file_decl("c.rs", &decls[1].1, &project, cx);
        });
    }

    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            project_entry_id,
            ..
        } = declaration
        {
            let project_path = project
                .read(cx)
                .path_for_entry(*project_entry_id, cx)
                .unwrap();
            assert_eq!(project_path.path.as_ref(), rel_path(path));
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
            ..
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                rel_path(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<SyntaxIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let file_indexing_parallelism = 2;
        let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx));
        cx.run_until_parked();

        (project, index, lang_id)
    }

    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}