use collections::{HashMap, HashSet};
use futures::lock::Mutex;
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
use language::{Buffer, BufferEvent};
use project::buffer_store::{BufferStore, BufferStoreEvent};
use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use project::{PathChange, Project, ProjectEntryId, ProjectPath};
use slotmap::SlotMap;
use std::iter;
use std::ops::Range;
use std::sync::Arc;
use text::BufferId;
use util::{RangeExt as _, debug_panic, some_or_debug_panic};

use crate::declaration::{
    BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
};
use crate::outline::declarations_in_buffer;

// TODO:
//
// * Skip for remote projects
//
// * Consider making SyntaxIndex not an Entity.

// Potential future improvements:
//
// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which
// references are present and their scores.

// Potential future optimizations:
//
// * Cache of buffers for files
//
// * Parse files directly instead of loading into a Rope. Make SyntaxMap generic to handle embedded
// languages? Will also need to find line boundaries, but that can be done by scanning characters in
// the flat representation.
//
// * Use something similar to slotmap without key versions.
//
// * Concurrent slotmap
//
// * Use queue for parsing

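/// Index of declarations extracted from outline queries, keyed by identifier. Declarations come
/// from two sources: files discovered through the project's worktrees (loaded and parsed into
/// temporary local buffers) and open buffers (re-indexed on edit). When querying by identifier,
/// declarations from open buffers take precedence over file-backed declarations for the same
/// project entry.
///
/// A minimal usage sketch, mirroring the tests at the bottom of this file (`project`, `cx`, and
/// `identifier` are assumed to already exist):
///
/// ```ignore
/// let index = cx.new(|cx| SyntaxIndex::new(&project, cx));
/// let state = index.read_with(cx, |index, _| index.state().clone());
/// let state = state.lock().await;
/// let declarations = state.declarations_for_identifier::<8>(&identifier);
/// ```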
pub struct SyntaxIndex {
    state: Arc<Mutex<SyntaxIndexState>>,
    project: WeakEntity<Project>,
}

#[derive(Default)]
pub struct SyntaxIndexState {
    declarations: SlotMap<DeclarationId, Declaration>,
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    files: HashMap<ProjectEntryId, FileState>,
    buffers: HashMap<BufferId, BufferState>,
}

#[derive(Debug, Default)]
struct FileState {
    declarations: Vec<DeclarationId>,
    task: Option<Task<()>>,
}

#[derive(Default)]
struct BufferState {
    declarations: Vec<DeclarationId>,
    task: Option<Task<()>>,
}

impl SyntaxIndex {
    pub fn new(project: &Entity<Project>, cx: &mut Context<Self>) -> Self {
        let mut this = Self {
            project: project.downgrade(),
            state: Arc::new(Mutex::new(SyntaxIndexState::default())),
        };

        let worktree_store = project.read(cx).worktree_store();
        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        for worktree in worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>()
        {
            for entry in worktree.files(false, 0) {
                this.update_file(
                    entry.id,
                    ProjectPath {
                        worktree_id: worktree.id(),
                        path: entry.path.clone(),
                    },
                    cx,
                );
            }
        }

        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }

    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                let state = Arc::downgrade(&self.state);
                let worktree_id = *worktree_id;
                let updated_entries_set = updated_entries_set.clone();
                cx.spawn(async move |this, cx| {
                    let Some(state) = state.upgrade() else { return };
                    for (path, entry_id, path_change) in updated_entries_set.iter() {
                        if let PathChange::Removed = path_change {
                            state.lock().await.files.remove(entry_id);
                        } else {
                            let project_path = ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            };
                            this.update(cx, |this, cx| {
                                this.update_file(*entry_id, project_path, cx);
                            })
                            .ok();
                        }
                    }
                })
                .detach();
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                let project_entry_id = *project_entry_id;
                self.with_state(cx, move |state| {
                    state.files.remove(&project_entry_id);
                })
            }
            _ => {}
        }
    }

    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

    pub fn state(&self) -> &Arc<Mutex<SyntaxIndexState>> {
        &self.state
    }

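    /// Applies `f` to the index state: synchronously if the lock is currently uncontended,
    /// otherwise on a background task once the lock becomes available.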
    fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) {
        if let Some(mut state) = self.state.try_lock() {
            f(&mut state);
            return;
        }
        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            f(&mut state)
        })
        .detach();
    }

    fn register_buffer(&mut self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer_id = buffer.read(cx).remote_id();
        cx.observe_release(buffer, move |this, _buffer, cx| {
            this.with_state(cx, move |state| {
                if let Some(buffer_state) = state.buffers.remove(&buffer_id) {
                    SyntaxIndexState::remove_buffer_declarations(
                        &buffer_state.declarations,
                        &mut state.declarations,
                        &mut state.identifiers,
                    );
                }
            })
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();

        self.update_buffer(buffer.clone(), cx);
    }

    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited => self.update_buffer(buffer, cx),
            _ => {}
        }
    }

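    /// Re-indexes an open buffer: waits for parsing to settle, extracts declarations from the
    /// buffer's outline on the background executor, and then replaces the buffer's previous
    /// entries in the index.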
    fn update_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer = buffer_entity.read(cx);

        let Some(project_entry_id) =
            project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
        else {
            return;
        };
        let buffer_id = buffer.remote_id();

        let mut parse_status = buffer.parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer_entity.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;
            let rope = snapshot.text.as_rope().clone();

            anyhow::Ok((
                declarations_in_buffer(&snapshot)
                    .into_iter()
                    .map(|item| {
                        (
                            item.parent_index,
                            BufferDeclaration::from_outline(item, &rope),
                        )
                    })
                    .collect::<Vec<_>>(),
                rope,
            ))
        });

        let task = cx.spawn({
            async move |this, cx| {
                let Ok((declarations, rope)) = parse_task.await else {
                    return;
                };

                this.update(cx, move |this, cx| {
                    this.with_state(cx, move |state| {
                        let buffer_state = state
                            .buffers
                            .entry(buffer_id)
                            .or_insert_with(Default::default);

                        SyntaxIndexState::remove_buffer_declarations(
                            &buffer_state.declarations,
                            &mut state.declarations,
                            &mut state.identifiers,
                        );

                        let mut new_ids = Vec::with_capacity(declarations.len());
                        state.declarations.reserve(declarations.len());
                        for (parent_index, mut declaration) in declarations {
                            declaration.parent = parent_index
                                .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                            let identifier = declaration.identifier.clone();
                            let declaration_id = state.declarations.insert(Declaration::Buffer {
                                rope: rope.clone(),
                                buffer_id,
                                declaration,
                                project_entry_id,
                            });
                            new_ids.push(declaration_id);

                            state
                                .identifiers
                                .entry(identifier)
                                .or_default()
                                .insert(declaration_id);
                        }

                        buffer_state.declarations = new_ids;
                    });
                })
                .ok();
            }
        });

        self.with_state(cx, move |state| {
            state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default)
                .task = Some(task)
        });
    }

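    /// Re-indexes a file that is not open in a buffer: loads its text from the worktree, parses
    /// it in a temporary local buffer using the language detected from the path, and replaces the
    /// project entry's previous declarations in the index.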
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) {
        let Some(project) = self.project.upgrade() else {
            return;
        };
        let project = project.read(cx);
        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return;
        };
        let language_registry = project.languages().clone();

        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language_registry
                    .language_for_file_path(&project_path.path)
                    .await
                    .ok();

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(language, cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            })
        });

        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;
            let rope = snapshot.as_rope();
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
                .collect::<Vec<_>>();
            anyhow::Ok(declarations)
        });

        let task = cx.spawn({
            async move |this, cx| {
                // TODO: how to handle errors?
                let Ok(declarations) = parse_task.await else {
                    return;
                };
                this.update(cx, |this, cx| {
                    this.with_state(cx, move |state| {
                        let file_state =
                            state.files.entry(entry_id).or_insert_with(Default::default);

                        for old_declaration_id in &file_state.declarations {
                            let Some(declaration) = state.declarations.remove(*old_declaration_id)
                            else {
                                debug_panic!("declaration not found");
                                continue;
                            };
                            if let Some(identifier_declarations) =
                                state.identifiers.get_mut(declaration.identifier())
                            {
                                identifier_declarations.remove(old_declaration_id);
                            }
                        }

                        let mut new_ids = Vec::with_capacity(declarations.len());
                        state.declarations.reserve(declarations.len());

                        for (parent_index, mut declaration) in declarations {
                            declaration.parent = parent_index
                                .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                            let identifier = declaration.identifier.clone();
                            let declaration_id = state.declarations.insert(Declaration::File {
                                project_entry_id: entry_id,
                                declaration,
                            });
                            new_ids.push(declaration_id);

                            state
                                .identifiers
                                .entry(identifier)
                                .or_default()
                                .insert(declaration_id);
                        }

                        file_state.declarations = new_ids;
                    });
                })
                .ok();
            }
        });

        self.with_state(cx, move |state| {
            state
                .files
                .entry(entry_id)
                .or_insert_with(Default::default)
                .task = Some(task);
        });
    }
}

impl SyntaxIndexState {
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }

    /// Returns declarations for the identifier. If more than `N` declarations are found, returns
    /// an empty vector.
    ///
    /// TODO: Consider doing some pre-ranking and truncating instead when `N` is exceeded.
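    ///
    /// Illustrative call, mirroring the tests in this file (`index_state` is assumed to be a held
    /// lock guard on the state and `main` an `Identifier` known to the index):
    ///
    /// ```ignore
    /// let decls = index_state.declarations_for_identifier::<8>(&main);
    /// ```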
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
    ) -> Vec<(DeclarationId, &Declaration)> {
        // make sure to not have a large stack allocation
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer {
                    project_entry_id, ..
                } => {
                    included_buffer_entry_ids.push(*project_entry_id);
                    result.push((*declaration_id, declaration));
                    if result.len() == N {
                        return Vec::new();
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        file_declarations.push((*declaration_id, declaration));
                    }
                }
            }
        }

        for (declaration_id, declaration) in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        result.push((declaration_id, declaration));

                        if result.len() == N {
                            return Vec::new();
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }

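    /// Iterates over the given buffer's declarations, yielding those whose item range fully
    /// contains `range`.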
    pub fn buffer_declarations_containing_range(
        &self,
        buffer_id: BufferId,
        range: Range<usize>,
    ) -> impl Iterator<Item = (DeclarationId, &BufferDeclaration)> {
        let Some(buffer_state) = self.buffers.get(&buffer_id) else {
            return itertools::Either::Left(iter::empty());
        };

        let iter = buffer_state
            .declarations
            .iter()
            .filter_map(move |declaration_id| {
                let Some(declaration) = self
                    .declarations
                    .get(*declaration_id)
                    .and_then(|d| d.as_buffer())
                else {
                    log::error!("bug: missing buffer outline declaration");
                    return None;
                };
                if declaration.item_range.contains_inclusive(&range) {
                    Some((*declaration_id, declaration))
                } else {
                    None
                }
            });
        itertools::Either::Right(iter)
    }

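    /// Returns how many declarations are indexed for the file or buffer that the given
    /// declaration belongs to.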
    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
        match declaration {
            Declaration::File {
                project_entry_id, ..
            } => self
                .files
                .get(project_entry_id)
                .map(|file_state| file_state.declarations.len())
                .unwrap_or_default(),
            Declaration::Buffer { buffer_id, .. } => self
                .buffers
                .get(buffer_id)
                .map(|buffer_state| buffer_state.declarations.len())
                .unwrap_or_default(),
        }
    }

    fn remove_buffer_declarations(
        old_declaration_ids: &[DeclarationId],
        declarations: &mut SlotMap<DeclarationId, Declaration>,
        identifiers: &mut HashMap<Identifier, HashSet<DeclarationId>>,
    ) {
        for old_declaration_id in old_declaration_ids {
            let Some(declaration) = declarations.remove(*old_declaration_id) else {
                debug_panic!("declaration not found");
                continue;
            };
            if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) {
                identifier_declarations.remove(old_declaration_id);
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::{path::Path, sync::Arc};

    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project};
    use serde_json::json;
    use settings::SettingsStore;
    use text::OffsetRangeExt as _;
    use util::path;

    use crate::syntax_index::SyntaxIndex;

    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range_in_file, 32..280);

            let decl = expect_file_decl("a.rs", &decls[1].1, &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range_in_file, 0..98);
        });
    }

    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    #[gpui::test]
    async fn test_declarations_limit(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        let decls = index_state.declarations_for_identifier::<1>(&Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        });
        assert_eq!(decls.len(), 0);
    }

    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone());
        {
            let index_state = index_state_arc.lock().await;

            cx.update(|cx| {
                let decls = index_state.declarations_for_identifier::<8>(&main);
                assert_eq!(decls.len(), 2);
                let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
                assert_eq!(decl.identifier, main);
                assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..280);

                expect_file_decl("a.rs", &decls[1].1, &project, cx);
            });
        }

        // Drop the buffer and wait for release
        cx.update(|_| {
            drop(buffer);
        });
        cx.run_until_parked();

        let index_state = index_state_arc.lock().await;

        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);
            expect_file_decl("c.rs", &decls[0].1, &project, cx);
            expect_file_decl("a.rs", &decls[1].1, &project, cx);
        });
    }

    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            project_entry_id,
            ..
        } = declaration
        {
            let project_path = project
                .read(cx)
                .path_for_entry(*project_entry_id, cx)
                .unwrap();
            assert_eq!(project_path.path.as_ref(), Path::new(path));
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                Path::new(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<SyntaxIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let index = cx.new(|cx| SyntaxIndex::new(&project, cx));
        cx.run_until_parked();

        (project, index, lang_id)
    }

    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}