use anyhow::{Result, anyhow};
use collections::{HashMap, HashSet};
use futures::channel::mpsc;
use futures::lock::Mutex;
use futures::{FutureExt as _, StreamExt, future};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
use itertools::Itertools;

use language::{Buffer, BufferEvent};
use postage::stream::Stream as _;
use project::buffer_store::{BufferStore, BufferStoreEvent};
use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use project::{PathChange, Project, ProjectEntryId, ProjectPath};
use slotmap::SlotMap;
use std::iter;
use std::ops::{DerefMut, Range};
use std::sync::Arc;
use text::BufferId;
use util::{RangeExt as _, debug_panic, some_or_debug_panic};

use crate::CachedDeclarationPath;
use crate::declaration::{
    BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
};
use crate::outline::declarations_in_buffer;

// TODO
//
// * Also queue / debounce buffer changes. A challenge for this is that use of
// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
//
// * Add a per-language configuration for skipping indexing.
//
// * Handle tsx / ts / js referencing each other.

// Potential future improvements:
//
// * Prevent indexing of a large file from blocking the queue.
//
// * Send multiple selected excerpt ranges. The challenge is that excerpt ranges influence which
// references are present and their scores.
//
// * Include single-file worktrees / non-visible worktrees? E.g. go-to-definition that resolves to a
// file in a build dependency. It should not be editable in that case - but how to distinguish the
// case where it should be editable?

// Potential future optimizations:
//
// * Index files on multiple threads in Zed (currently only parallel for the CLI). Adding some kind
// of priority system to the background executor could help - it's single-threaded for now to avoid
// interfering with other work.
//
// * Parse files directly instead of loading them into a Rope.
//
//   - This would allow the task handling dirty_files to be done entirely on the background
//     executor.
//
//   - Make SyntaxMap generic to handle embedded languages? It will also need to find line
//     boundaries, but that can be done by scanning characters in the flat representation.
//
// * Use something similar to slotmap without key versions.
//
// * Concurrent slotmap.
pub struct SyntaxIndex {
    state: Arc<Mutex<SyntaxIndexState>>,
    project: WeakEntity<Project>,
    initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
    _file_indexing_task: Option<Task<()>>,
}

pub struct SyntaxIndexState {
    declarations: SlotMap<DeclarationId, Declaration>,
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    files: HashMap<ProjectEntryId, FileState>,
    buffers: HashMap<BufferId, BufferState>,
    dirty_files: HashMap<ProjectEntryId, ProjectPath>,
    dirty_files_tx: mpsc::Sender<()>,
}

#[derive(Debug, Default)]
struct FileState {
    declarations: Vec<DeclarationId>,
}

#[derive(Default)]
struct BufferState {
    declarations: Vec<DeclarationId>,
    task: Option<Task<()>>,
}

impl SyntaxIndex {
    pub fn new(
        project: &Entity<Project>,
        file_indexing_parallelism: usize,
        cx: &mut Context<Self>,
    ) -> Self {
        assert!(file_indexing_parallelism > 0);
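        // Capacity of 1 is intentional: a send only signals that `dirty_files` is non-empty, so
        // redundant notifications coalesce while the consumer is busy.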
        let (dirty_files_tx, mut dirty_files_rx) = mpsc::channel::<()>(1);
        let (mut initial_file_indexing_done_tx, initial_file_indexing_done_rx) =
            postage::watch::channel();

        let initial_state = SyntaxIndexState {
            declarations: SlotMap::default(),
            identifiers: HashMap::default(),
            files: HashMap::default(),
            buffers: HashMap::default(),
            dirty_files: HashMap::default(),
            dirty_files_tx,
        };
        let mut this = Self {
            project: project.downgrade(),
            state: Arc::new(Mutex::new(initial_state)),
            initial_file_indexing_done_rx,
            _file_indexing_task: None,
        };

        let worktree_store = project.read(cx).worktree_store();
        let initial_worktree_snapshots = worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>();
        this._file_indexing_task = Some(cx.spawn(async move |this, cx| {
            let snapshots_file_count = initial_worktree_snapshots
                .iter()
                .map(|worktree| worktree.file_count())
                .sum::<usize>();
            if snapshots_file_count > 0 {
                let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
                let chunk_count = snapshots_file_count.div_ceil(chunk_size);
                let file_chunks = initial_worktree_snapshots
                    .iter()
                    .flat_map(|worktree| {
                        let worktree_id = worktree.id();
                        worktree.files(false, 0).map(move |entry| {
                            (
                                entry.id,
                                ProjectPath {
                                    worktree_id,
                                    path: entry.path.clone(),
                                },
                            )
                        })
                    })
                    .chunks(chunk_size);

                let mut tasks = Vec::with_capacity(chunk_count);
                for chunk in file_chunks.into_iter() {
                    tasks.push(Self::update_dirty_files(
                        &this,
                        chunk.into_iter().collect(),
                        cx.clone(),
                    ));
                }
                futures::future::join_all(tasks).await;
                log::info!("Finished initial file indexing");
            }

            *initial_file_indexing_done_tx.borrow_mut() = true;

            let Ok(state) = this.read_with(cx, |this, _cx| Arc::downgrade(&this.state)) else {
                return;
            };
            while dirty_files_rx.next().await.is_some() {
                let Some(state) = state.upgrade() else {
                    return;
                };
                let mut state = state.lock().await;
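                // Shrink `dirty_files` when its allocation is much larger than its contents, so a
                // large burst of file events doesn't pin memory indefinitely.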
                let was_underused = state.dirty_files.capacity() > 255
                    && state.dirty_files.len() * 8 < state.dirty_files.capacity();
                let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
                if was_underused {
                    state.dirty_files.shrink_to_fit();
                }
                drop(state);
                if dirty_files.is_empty() {
                    continue;
                }

                let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
                let chunk_count = dirty_files.len().div_ceil(chunk_size);
                let mut tasks = Vec::with_capacity(chunk_count);
                let chunks = dirty_files.into_iter().chunks(chunk_size);
                for chunk in chunks.into_iter() {
                    tasks.push(Self::update_dirty_files(
                        &this,
                        chunk.into_iter().collect(),
                        cx.clone(),
                    ));
                }
                futures::future::join_all(tasks).await;
            }
        }));

        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }

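    /// Re-indexes the given files one at a time. Parallelism comes from the callers, which split
    /// the dirty set into chunks and run one of these tasks per chunk.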
    async fn update_dirty_files(
        this: &WeakEntity<Self>,
        dirty_files: Vec<(ProjectEntryId, ProjectPath)>,
        mut cx: AsyncApp,
    ) {
        for (entry_id, project_path) in dirty_files {
            let Ok(task) = this.update(&mut cx, |this, cx| {
                this.update_file(entry_id, project_path, cx)
            }) else {
                return;
            };
            task.await;
        }
    }

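    /// Resolves once the initial scan of the worktrees has been indexed.
    ///
    /// A usage sketch (illustrative; assumes the returned task is awaited in an async context):
    ///
    /// ```ignore
    /// let wait = index.read(cx).wait_for_initial_file_indexing(cx);
    /// wait.await?;
    /// ```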
    pub fn wait_for_initial_file_indexing(&self, cx: &App) -> Task<Result<()>> {
        if *self.initial_file_indexing_done_rx.borrow() {
            Task::ready(Ok(()))
        } else {
            let mut rx = self.initial_file_indexing_done_rx.clone();
            cx.background_spawn(async move {
                loop {
                    match rx.recv().await {
                        Some(true) => return Ok(()),
                        Some(false) => {}
                        None => {
                            return Err(anyhow!(
                                "SyntaxIndex dropped while waiting for initial file indexing"
                            ));
                        }
                    }
                }
            })
        }
    }

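    /// Returns the project paths of all files currently present in the index.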
    pub fn indexed_file_paths(&self, cx: &App) -> Task<Vec<ProjectPath>> {
        let state = self.state.clone();
        let project = self.project.clone();

        cx.spawn(async move |cx| {
            let state = state.lock().await;
            let Some(project) = project.upgrade() else {
                return vec![];
            };
            project
                .read_with(cx, |project, cx| {
                    state
                        .files
                        .keys()
                        .filter_map(|entry_id| project.path_for_entry(*entry_id, cx))
                        .collect()
                })
                .unwrap_or_default()
        })
    }

    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                let state = Arc::downgrade(&self.state);
                let worktree_id = *worktree_id;
                let updated_entries_set = updated_entries_set.clone();
                cx.background_spawn(async move {
                    let Some(state) = state.upgrade() else { return };
                    let mut state = state.lock().await;
                    for (path, entry_id, path_change) in updated_entries_set.iter() {
                        if let PathChange::Removed = path_change {
                            state.files.remove(entry_id);
                            state.dirty_files.remove(entry_id);
                        } else {
                            let project_path = ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            };
                            state.dirty_files.insert(*entry_id, project_path);
                        }
                    }
                    match state.dirty_files_tx.try_send(()) {
                        Err(err) if err.is_disconnected() => {
                            log::error!("bug: syntax indexing queue is disconnected");
                        }
                        _ => {}
                    }
                })
                .detach();
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                let project_entry_id = *project_entry_id;
                self.with_state(cx, move |state| {
                    state.files.remove(&project_entry_id);
                })
            }
            _ => {}
        }
    }

    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

    pub fn state(&self) -> &Arc<Mutex<SyntaxIndexState>> {
        &self.state
    }

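    /// Runs `f` on the index state: synchronously when the lock is free, otherwise on the
    /// background executor once the lock can be acquired.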
    fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) {
        if let Some(mut state) = self.state.try_lock() {
            f(&mut state);
            return;
        }
        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            f(&mut state)
        })
        .detach();
    }

    fn register_buffer(&self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer_id = buffer.read(cx).remote_id();
        cx.observe_release(buffer, move |this, _buffer, cx| {
            this.with_state(cx, move |state| {
                if let Some(buffer_state) = state.buffers.remove(&buffer_id) {
                    SyntaxIndexState::remove_buffer_declarations(
                        &buffer_state.declarations,
                        &mut state.declarations,
                        &mut state.identifiers,
                    );
                }
            })
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();

        self.update_buffer(buffer.clone(), cx);
    }

    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            // Declaration paths are cached, so they must be refreshed when the file handle
            // changes.
            BufferEvent::Edited | BufferEvent::FileHandleChanged => {
                self.update_buffer(buffer, cx)
            }
            _ => {}
        }
    }

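    /// Re-indexes an open buffer: waits for parsing to settle, snapshots the buffer, then extracts
    /// declarations on the background executor and swaps them into the index state.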
    fn update_buffer(&self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer = buffer_entity.read(cx);
        if buffer.language().is_none() {
            return;
        }

        let Some((project_entry_id, cached_path)) = project::File::from_dyn(buffer.file())
            .and_then(|f| {
                let project_entry_id = f.project_entry_id()?;
                let cached_path = CachedDeclarationPath::new(
                    f.worktree.read(cx).abs_path(),
                    &f.path,
                    buffer.language(),
                );
                Some((project_entry_id, cached_path))
            })
        else {
            return;
        };
        let buffer_id = buffer.remote_id();

        let mut parse_status = buffer.parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer_entity.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let state = Arc::downgrade(&self.state);
        let task = cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok(snapshot) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.text.as_rope();

            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| {
                    (
                        item.parent_index,
                        BufferDeclaration::from_outline(item, rope),
                    )
                })
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let buffer_state = state.buffers.entry(buffer_id).or_default();

            SyntaxIndexState::remove_buffer_declarations(
                &buffer_state.declarations,
                &mut state.declarations,
                &mut state.identifiers,
            );

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::Buffer {
                    rope: rope.clone(),
                    buffer_id,
                    declaration,
                    project_entry_id,
                    cached_path: cached_path.clone(),
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }

            buffer_state.declarations = new_ids;
        });

        self.with_state(cx, move |state| {
            state.buffers.entry(buffer_id).or_default().task = Some(task)
        });
    }

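    /// Re-indexes a file that is not open in a buffer: loads its text and language, parses it in a
    /// temporary local buffer, then extracts declarations on the background executor.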
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<()> {
        let Some(project) = self.project.upgrade() else {
            return Task::ready(());
        };
        let project = project.read(cx);

        let language_registry = project.languages();
        let Some(available_language) =
            language_registry.language_for_file_path(project_path.path.as_std_path())
        else {
            return Task::ready(());
        };
        let language = if let Some(Ok(Ok(language))) = language_registry
            .load_language(&available_language)
            .now_or_never()
        {
            if language
                .grammar()
                .is_none_or(|grammar| grammar.outline_config.is_none())
            {
                return Task::ready(());
            }
            future::Either::Left(async { Ok(language) })
        } else {
            let language_registry = language_registry.clone();
            future::Either::Right(async move {
                anyhow::Ok(
                    language_registry
                        .load_language(&available_language)
                        .await??,
                )
            })
        };

        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(());
        };

        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let worktree_abs_path = worktree.abs_path();
            let load_task = worktree.load_file(&project_path.path, None, false, true, None, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language.await?;

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(Some(language.clone()), cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                let cached_path = CachedDeclarationPath::new(
                    worktree_abs_path,
                    &project_path.path,
                    Some(&language),
                );

                let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;

                anyhow::Ok((snapshot, cached_path))
            })
        });

        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            // TODO: How to handle errors?
            let Ok((snapshot, cached_path)) = snapshot_task.await else {
                return;
            };
            let rope = snapshot.as_rope();
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
                .collect::<Vec<_>>();

            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            let state = state.deref_mut();

            let file_state = state.files.entry(entry_id).or_default();
            for old_declaration_id in &file_state.declarations {
                let Some(declaration) = state.declarations.remove(*old_declaration_id) else {
                    debug_panic!("declaration not found");
                    continue;
                };
                if let Some(identifier_declarations) =
                    state.identifiers.get_mut(declaration.identifier())
                {
                    identifier_declarations.remove(old_declaration_id);
                }
            }

            let mut new_ids = Vec::with_capacity(declarations.len());
            state.declarations.reserve(declarations.len());
            for (parent_index, mut declaration) in declarations {
                declaration.parent =
                    parent_index.and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                let identifier = declaration.identifier.clone();
                let declaration_id = state.declarations.insert(Declaration::File {
                    project_entry_id: entry_id,
                    declaration,
                    cached_path: cached_path.clone(),
                });
                new_ids.push(declaration_id);

                state
                    .identifiers
                    .entry(identifier)
                    .or_default()
                    .insert(declaration_id);
            }
            file_state.declarations = new_ids;
        })
    }
}

impl SyntaxIndexState {
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }

    /// Returns up to `N` declarations for the identifier, with declarations from open buffers
    /// shadowing those from the same files on disk. Returns an empty vector if the limit `N` is
    /// reached.
    ///
    /// TODO: Consider doing some pre-ranking and instead truncating when N is exceeded.
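    ///
    /// A lookup sketch (illustrative; mirrors the usage in the tests below):
    ///
    /// ```ignore
    /// let identifier = Identifier { name: "main".into(), language_id };
    /// let declarations = state.declarations_for_identifier::<8>(&identifier);
    /// ```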
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
    ) -> Vec<(DeclarationId, &Declaration)> {
        // Keep `N` small to avoid a large stack allocation.
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer {
                    project_entry_id, ..
                } => {
                    included_buffer_entry_ids.push(*project_entry_id);
                    result.push((*declaration_id, declaration));
                    if result.len() == N {
                        return Vec::new();
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        file_declarations.push((*declaration_id, declaration));
                    }
                }
            }
        }

        for (declaration_id, declaration) in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        result.push((declaration_id, declaration));

                        if result.len() == N {
                            return Vec::new();
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }

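    /// Returns the declarations in the buffer whose item ranges contain `range`, e.g. the
    /// functions and types enclosing a cursor position.
    ///
    /// A sketch (illustrative; the buffer id and byte range are assumptions):
    ///
    /// ```ignore
    /// for (declaration_id, declaration) in
    ///     state.buffer_declarations_containing_range(buffer_id, 10..20)
    /// {
    ///     // `declaration.identifier` names an enclosing declaration.
    /// }
    /// ```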
    pub fn buffer_declarations_containing_range(
        &self,
        buffer_id: BufferId,
        range: Range<usize>,
    ) -> impl Iterator<Item = (DeclarationId, &BufferDeclaration)> {
        let Some(buffer_state) = self.buffers.get(&buffer_id) else {
            return itertools::Either::Left(iter::empty());
        };

        let iter = buffer_state
            .declarations
            .iter()
            .filter_map(move |declaration_id| {
                let Some(declaration) = self
                    .declarations
                    .get(*declaration_id)
                    .and_then(|d| d.as_buffer())
                else {
                    log::error!("bug: missing buffer outline declaration");
                    return None;
                };
                if declaration.item_range.contains_inclusive(&range) {
                    Some((*declaration_id, declaration))
                } else {
                    None
                }
            });
        itertools::Either::Right(iter)
    }

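    /// Returns how many declarations are indexed for the file (or buffer) that contains
    /// `declaration`.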
    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
        match declaration {
            Declaration::File {
                project_entry_id, ..
            } => self
                .files
                .get(project_entry_id)
                .map(|file_state| file_state.declarations.len())
                .unwrap_or_default(),
            Declaration::Buffer { buffer_id, .. } => self
                .buffers
                .get(buffer_id)
                .map(|buffer_state| buffer_state.declarations.len())
                .unwrap_or_default(),
        }
    }

    fn remove_buffer_declarations(
        old_declaration_ids: &[DeclarationId],
        declarations: &mut SlotMap<DeclarationId, Declaration>,
        identifiers: &mut HashMap<Identifier, HashSet<DeclarationId>>,
    ) {
        for old_declaration_id in old_declaration_ids {
            let Some(declaration) = declarations.remove(*old_declaration_id) else {
                debug_panic!("declaration not found");
                continue;
            };
            if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) {
                identifier_declarations.remove(old_declaration_id);
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::Arc;

    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project};
    use serde_json::json;
    use settings::SettingsStore;
    use text::OffsetRangeExt as _;
    use util::{path, rel_path::rel_path};

    use crate::syntax_index::SyntaxIndex;

    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("a.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range, 0..98);

            let decl = expect_file_decl("c.rs", &decls[1].1, &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range, 32..280);
        });
    }

    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    #[gpui::test]
    async fn test_declarations_limit(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        let decls = index_state.declarations_for_identifier::<1>(&Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        });
        assert_eq!(decls.len(), 0);
    }

    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone());
        {
            let index_state = index_state_arc.lock().await;

            cx.update(|cx| {
                let decls = index_state.declarations_for_identifier::<8>(&main);
                assert_eq!(decls.len(), 2);
                let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
                assert_eq!(decl.identifier, main);
                assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..280);

                expect_file_decl("a.rs", &decls[1].1, &project, cx);
            });
        }

        // Drop the buffer and wait for release
        cx.update(|_| {
            drop(buffer);
        });
        cx.run_until_parked();

        let index_state = index_state_arc.lock().await;

        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);
            expect_file_decl("a.rs", &decls[0].1, &project, cx);
            expect_file_decl("c.rs", &decls[1].1, &project, cx);
        });
    }

    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            project_entry_id,
            ..
        } = declaration
        {
            let project_path = project
                .read(cx)
                .path_for_entry(*project_entry_id, cx)
                .unwrap();
            assert_eq!(project_path.path.as_ref(), rel_path(path));
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
            ..
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                rel_path(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<SyntaxIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let file_indexing_parallelism = 2;
        let index = cx.new(|cx| SyntaxIndex::new(&project, file_indexing_parallelism, cx));
        cx.run_until_parked();

        (project, index, lang_id)
    }

    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}