use collections::{HashMap, HashSet};
use futures::lock::Mutex;
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
use language::{Buffer, BufferEvent};
use project::buffer_store::{BufferStore, BufferStoreEvent};
use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use project::{PathChange, Project, ProjectEntryId, ProjectPath};
use slotmap::SlotMap;
use std::iter;
use std::ops::Range;
use std::sync::Arc;
use text::BufferId;
use util::{RangeExt as _, debug_panic, some_or_debug_panic};

use crate::declaration::{
    BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
};
use crate::outline::declarations_in_buffer;

// Potential future improvements:
//
// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which
//   references are present and their scores.

// Potential future optimizations:
//
// * Cache of buffers for files
//
// * Parse files directly instead of loading into a Rope. Make SyntaxMap generic to handle embedded
//   languages? Will also need to find line boundaries, but that can be done by scanning characters
//   in the flat representation.
//
// * Use something similar to slotmap without key versions.
//
// * Concurrent slotmap
//
// * Use queue for parsing

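/// Indexes declarations across a project, combining results parsed from on-disk files with
/// results from open buffers, which take precedence for their corresponding file entries.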
pub struct SyntaxIndex {
    state: Arc<Mutex<SyntaxIndexState>>,
    project: WeakEntity<Project>,
}

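/// The index's data: declarations stored in a slotmap, plus lookup tables keyed by identifier,
/// by project file entry, and by open buffer.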
#[derive(Default)]
pub struct SyntaxIndexState {
    declarations: SlotMap<DeclarationId, Declaration>,
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    files: HashMap<ProjectEntryId, FileState>,
    buffers: HashMap<BufferId, BufferState>,
}

#[derive(Debug, Default)]
struct FileState {
    declarations: Vec<DeclarationId>,
    task: Option<Task<()>>,
}

#[derive(Default)]
struct BufferState {
    declarations: Vec<DeclarationId>,
    task: Option<Task<()>>,
}

impl SyntaxIndex {
    pub fn new(project: &Entity<Project>, cx: &mut Context<Self>) -> Self {
        let mut this = Self {
            project: project.downgrade(),
            state: Arc::new(Mutex::new(SyntaxIndexState::default())),
        };

        let worktree_store = project.read(cx).worktree_store();
        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        for worktree in worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>()
        {
            for entry in worktree.files(false, 0) {
                this.update_file(
                    entry.id,
                    ProjectPath {
                        worktree_id: worktree.id(),
                        path: entry.path.clone(),
                    },
                    cx,
                );
            }
        }

        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }

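    // Keeps the index in sync with worktree changes: updated entries are re-indexed, while
    // removed or deleted entries are dropped from the file index.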
    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                let state = Arc::downgrade(&self.state);
                let worktree_id = *worktree_id;
                let updated_entries_set = updated_entries_set.clone();
                cx.spawn(async move |this, cx| {
                    let Some(state) = state.upgrade() else { return };
                    for (path, entry_id, path_change) in updated_entries_set.iter() {
                        if let PathChange::Removed = path_change {
                            state.lock().await.files.remove(entry_id);
                        } else {
                            let project_path = ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            };
                            this.update(cx, |this, cx| {
                                this.update_file(*entry_id, project_path, cx);
                            })
                            .ok();
                        }
                    }
                })
                .detach();
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                let project_entry_id = *project_entry_id;
                self.with_state(cx, move |state| {
                    state.files.remove(&project_entry_id);
                })
            }
            _ => {}
        }
    }

    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

    pub fn state(&self) -> &Arc<Mutex<SyntaxIndexState>> {
        &self.state
    }

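    // Runs `f` on the state immediately if the lock is free, otherwise applies it from a
    // background task once the lock is acquired.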
    fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) {
        if let Some(mut state) = self.state.try_lock() {
            f(&mut state);
            return;
        }
        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            f(&mut state)
        })
        .detach();
    }

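    // Indexes an open buffer, re-indexes it on edits, and removes its declarations when the
    // buffer entity is released.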
    fn register_buffer(&mut self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer_id = buffer.read(cx).remote_id();
        cx.observe_release(buffer, move |this, _buffer, cx| {
            this.with_state(cx, move |state| {
                if let Some(buffer_state) = state.buffers.remove(&buffer_id) {
                    SyntaxIndexState::remove_buffer_declarations(
                        &buffer_state.declarations,
                        &mut state.declarations,
                        &mut state.identifiers,
                    );
                }
            })
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();

        self.update_buffer(buffer.clone(), cx);
    }

    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited => self.update_buffer(buffer, cx),
            _ => {}
        }
    }

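    // Waits for parsing to be idle, extracts the buffer's outline declarations on a background
    // task, and then replaces this buffer's entries in the index.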
    fn update_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer = buffer_entity.read(cx);

        let Some(project_entry_id) =
            project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
        else {
            return;
        };
        let buffer_id = buffer.remote_id();

        let mut parse_status = buffer.parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer_entity.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;
            let rope = snapshot.text.as_rope().clone();

            anyhow::Ok((
                declarations_in_buffer(&snapshot)
                    .into_iter()
                    .map(|item| {
                        (
                            item.parent_index,
                            BufferDeclaration::from_outline(item, &rope),
                        )
                    })
                    .collect::<Vec<_>>(),
                rope,
            ))
        });

        let task = cx.spawn({
            async move |this, cx| {
                let Ok((declarations, rope)) = parse_task.await else {
                    return;
                };

                this.update(cx, move |this, cx| {
                    this.with_state(cx, move |state| {
                        let buffer_state = state
                            .buffers
                            .entry(buffer_id)
                            .or_insert_with(Default::default);

                        SyntaxIndexState::remove_buffer_declarations(
                            &buffer_state.declarations,
                            &mut state.declarations,
                            &mut state.identifiers,
                        );

                        let mut new_ids = Vec::with_capacity(declarations.len());
                        state.declarations.reserve(declarations.len());
                        for (parent_index, mut declaration) in declarations {
                            declaration.parent = parent_index
                                .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                            let identifier = declaration.identifier.clone();
                            let declaration_id = state.declarations.insert(Declaration::Buffer {
                                rope: rope.clone(),
                                buffer_id,
                                declaration,
                                project_entry_id,
                            });
                            new_ids.push(declaration_id);

                            state
                                .identifiers
                                .entry(identifier)
                                .or_default()
                                .insert(declaration_id);
                        }

                        buffer_state.declarations = new_ids;
                    });
                })
                .ok();
            }
        });

        self.with_state(cx, move |state| {
            state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default)
                .task = Some(task)
        });
    }

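    // Loads the file into a temporary local buffer, extracts its outline declarations on a
    // background task, and then replaces this file entry's declarations in the index.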
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) {
        let Some(project) = self.project.upgrade() else {
            return;
        };
        let project = project.read(cx);
        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return;
        };
        let language_registry = project.languages().clone();

        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language_registry
                    .language_for_file_path(&project_path.path)
                    .await
                    .ok();

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(language, cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            })
        });

        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;
            let rope = snapshot.as_rope();
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
                .collect::<Vec<_>>();
            anyhow::Ok(declarations)
        });

        let task = cx.spawn({
            async move |this, cx| {
                // TODO: how to handle errors?
                let Ok(declarations) = parse_task.await else {
                    return;
                };
                this.update(cx, |this, cx| {
                    this.with_state(cx, move |state| {
                        let file_state =
                            state.files.entry(entry_id).or_insert_with(Default::default);

                        for old_declaration_id in &file_state.declarations {
                            let Some(declaration) = state.declarations.remove(*old_declaration_id)
                            else {
                                debug_panic!("declaration not found");
                                continue;
                            };
                            if let Some(identifier_declarations) =
                                state.identifiers.get_mut(declaration.identifier())
                            {
                                identifier_declarations.remove(old_declaration_id);
                            }
                        }

                        let mut new_ids = Vec::with_capacity(declarations.len());
                        state.declarations.reserve(declarations.len());

                        for (parent_index, mut declaration) in declarations {
                            declaration.parent = parent_index
                                .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                            let identifier = declaration.identifier.clone();
                            let declaration_id = state.declarations.insert(Declaration::File {
                                project_entry_id: entry_id,
                                declaration,
                            });
                            new_ids.push(declaration_id);

                            state
                                .identifiers
                                .entry(identifier)
                                .or_default()
                                .insert(declaration_id);
                        }

                        file_state.declarations = new_ids;
                    });
                })
                .ok();
            }
        });

        self.with_state(cx, move |state| {
            state
                .files
                .entry(entry_id)
                .or_insert_with(Default::default)
                .task = Some(task);
        });
    }
}

impl SyntaxIndexState {
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }

    /// Returns declarations for the identifier. If the limit is exceeded, returns an empty vector.
    ///
    /// TODO: Consider doing some pre-ranking and instead truncating when N is exceeded.
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
    ) -> Vec<(DeclarationId, &Declaration)> {
        // make sure to not have a large stack allocation
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer {
                    project_entry_id, ..
                } => {
                    included_buffer_entry_ids.push(*project_entry_id);
                    result.push((*declaration_id, declaration));
                    if result.len() == N {
                        return Vec::new();
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        file_declarations.push((*declaration_id, declaration));
                    }
                }
            }
        }

        for (declaration_id, declaration) in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        result.push((declaration_id, declaration));

                        if result.len() == N {
                            return Vec::new();
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }

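    /// Returns the declarations of the given open buffer whose item range contains `range`.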
    pub fn buffer_declarations_containing_range(
        &self,
        buffer_id: BufferId,
        range: Range<usize>,
    ) -> impl Iterator<Item = (DeclarationId, &BufferDeclaration)> {
        let Some(buffer_state) = self.buffers.get(&buffer_id) else {
            return itertools::Either::Left(iter::empty());
        };

        let iter = buffer_state
            .declarations
            .iter()
            .filter_map(move |declaration_id| {
                let Some(declaration) = self
                    .declarations
                    .get(*declaration_id)
                    .and_then(|d| d.as_buffer())
                else {
                    log::error!("bug: missing buffer outline declaration");
                    return None;
                };
                if declaration.item_range.contains_inclusive(&range) {
                    return Some((*declaration_id, declaration));
                }
                None
            });
        itertools::Either::Right(iter)
    }

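    /// Returns how many declarations are indexed for the file or buffer that contains the given
    /// declaration.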
    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
        match declaration {
            Declaration::File {
                project_entry_id, ..
            } => self
                .files
                .get(project_entry_id)
                .map(|file_state| file_state.declarations.len())
                .unwrap_or_default(),
            Declaration::Buffer { buffer_id, .. } => self
                .buffers
                .get(buffer_id)
                .map(|buffer_state| buffer_state.declarations.len())
                .unwrap_or_default(),
        }
    }

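    // Removes a buffer's previous declarations from the slotmap and the identifier lookup table.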
    fn remove_buffer_declarations(
        old_declaration_ids: &[DeclarationId],
        declarations: &mut SlotMap<DeclarationId, Declaration>,
        identifiers: &mut HashMap<Identifier, HashSet<DeclarationId>>,
    ) {
        for old_declaration_id in old_declaration_ids {
            let Some(declaration) = declarations.remove(*old_declaration_id) else {
                debug_panic!("declaration not found");
                continue;
            };
            if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) {
                identifier_declarations.remove(old_declaration_id);
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::{path::Path, sync::Arc};

    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project};
    use serde_json::json;
    use settings::SettingsStore;
    use text::OffsetRangeExt as _;
    use util::path;

    use crate::syntax_index::SyntaxIndex;

    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range, 32..280);

            let decl = expect_file_decl("a.rs", &decls[1].1, &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range, 0..98);
        });
    }

    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    #[gpui::test]
    async fn test_declarations_limit(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        let decls = index_state.declarations_for_identifier::<1>(&Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        });
        assert_eq!(decls.len(), 0);
    }

    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone());
        {
            let index_state = index_state_arc.lock().await;

            cx.update(|cx| {
                let decls = index_state.declarations_for_identifier::<8>(&main);
                assert_eq!(decls.len(), 2);
                let decl = expect_buffer_decl("c.rs", &decls[0].1, &project, cx);
                assert_eq!(decl.identifier, main);
                assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..280);

                expect_file_decl("a.rs", &decls[1].1, &project, cx);
            });
        }

        // Drop the buffer and wait for release
        cx.update(|_| {
            drop(buffer);
        });
        cx.run_until_parked();

        let index_state = index_state_arc.lock().await;

        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);
            expect_file_decl("c.rs", &decls[0].1, &project, cx);
            expect_file_decl("a.rs", &decls[1].1, &project, cx);
        });
    }

    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            project_entry_id,
            ..
        } = declaration
        {
            let project_path = project
                .read(cx)
                .path_for_entry(*project_entry_id, cx)
                .unwrap();
            assert_eq!(project_path.path.as_ref(), Path::new(path),);
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                Path::new(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<SyntaxIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let index = cx.new(|cx| SyntaxIndex::new(&project, cx));
        cx.run_until_parked();

        (project, index, lang_id)
    }

    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}