1use std::sync::Arc;
2
3use collections::{HashMap, HashSet};
4use futures::lock::Mutex;
5use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
6use language::{Buffer, BufferEvent};
7use project::buffer_store::{BufferStore, BufferStoreEvent};
8use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
9use project::{PathChange, Project, ProjectEntryId, ProjectPath};
10use slotmap::SlotMap;
11use text::BufferId;
12use util::{ResultExt as _, debug_panic, some_or_debug_panic};
13
14use crate::declaration::{
15 BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
16};
17use crate::outline::declarations_in_buffer;
18
19// TODO:
20//
21// * Skip for remote projects
22//
23// * Consider making SyntaxIndex not an Entity.
24
25// Potential future improvements:
26//
27// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which
28// references are present and their scores.
29
30// Potential future optimizations:
31//
32// * Cache of buffers for files
33//
34// * Parse files directly instead of loading into a Rope. Make SyntaxMap generic to handle embedded
35// languages? Will also need to find line boundaries, but that can be done by scanning characters in
36// the flat representation.
37//
38// * Use something similar to slotmap without key versions.
39//
40// * Concurrent slotmap
41//
42// * Use queue for parsing
43//
44
/// Indexes declarations across a project's worktree files and open buffers so
/// they can be looked up by [`Identifier`].
pub struct SyntaxIndex {
    // Shared with background tasks; guarded by an async mutex so updates can
    // run off the main thread.
    state: Arc<Mutex<SyntaxIndexState>>,
    project: WeakEntity<Project>,
}
49
/// The indexed data itself. `declarations` owns every declaration; the other
/// maps hold `DeclarationId` keys into it.
#[derive(Default)]
pub struct SyntaxIndexState {
    declarations: SlotMap<DeclarationId, Declaration>,
    // Identifier -> all declarations with that name, across files and buffers.
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    // Declarations parsed from on-disk files, keyed by project entry.
    files: HashMap<ProjectEntryId, FileState>,
    // Declarations parsed from open buffers; these shadow same-entry file
    // declarations in `declarations_for_identifier`.
    buffers: HashMap<BufferId, BufferState>,
}
57
/// Index entry for an on-disk file.
#[derive(Debug, Default)]
struct FileState {
    // Ids of this file's declarations within `SyntaxIndexState::declarations`.
    declarations: Vec<DeclarationId>,
    // In-flight (re)parse task; replaced on every update.
    task: Option<Task<()>>,
}
63
/// Index entry for an open buffer.
#[derive(Default)]
struct BufferState {
    // Ids of this buffer's declarations within `SyntaxIndexState::declarations`.
    declarations: Vec<DeclarationId>,
    // In-flight (re)parse task; replaced on every update.
    task: Option<Task<()>>,
}
69
impl SyntaxIndex {
    /// Creates the index for `project`, seeding it with every file in every
    /// worktree plus every already-open buffer, then subscribing to worktree
    /// and buffer-store events to keep it current.
    pub fn new(project: &Entity<Project>, cx: &mut Context<Self>) -> Self {
        let mut this = Self {
            project: project.downgrade(),
            state: Arc::new(Mutex::new(SyntaxIndexState::default())),
        };

        let worktree_store = project.read(cx).worktree_store();
        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        // Snapshots are collected up front so the worktree_store read borrow
        // is released before `update_file` re-reads the project.
        for worktree in worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>()
        {
            // files(false, 0): assumed to mean "non-ignored files, from the
            // start of the worktree" — TODO confirm against Snapshot::files.
            for entry in worktree.files(false, 0) {
                this.update_file(
                    entry.id,
                    ProjectPath {
                        worktree_id: worktree.id(),
                        path: entry.path.clone(),
                    },
                    cx,
                );
            }
        }

        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }

    /// Keeps the file index in sync with worktree changes: removed entries are
    /// dropped, all other updated entries are reparsed.
    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                let state = Arc::downgrade(&self.state);
                let worktree_id = *worktree_id;
                let updated_entries_set = updated_entries_set.clone();
                cx.spawn(async move |this, cx| {
                    // The index may have been dropped while this task was queued.
                    let Some(state) = state.upgrade() else { return };
                    for (path, entry_id, path_change) in updated_entries_set.iter() {
                        if let PathChange::Removed = path_change {
                            state.lock().await.files.remove(entry_id);
                        } else {
                            let project_path = ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            };
                            this.update(cx, |this, cx| {
                                this.update_file(*entry_id, project_path, cx);
                            })
                            .ok();
                        }
                    }
                })
                .detach();
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                let project_entry_id = *project_entry_id;
                self.with_state(cx, move |state| {
                    state.files.remove(&project_entry_id);
                })
            }
            _ => {}
        }
    }

    /// Starts tracking newly added buffers; the remaining buffer-store events
    /// don't affect the index.
    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

    /// Shared handle to the index state, for readers that want to take the
    /// lock themselves.
    pub fn state(&self) -> &Arc<Mutex<SyntaxIndexState>> {
        &self.state
    }

    /// Runs `f` with exclusive access to the state: synchronously when the
    /// lock is free, otherwise on a detached background task once the lock
    /// can be acquired.
    fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) {
        if let Some(mut state) = self.state.try_lock() {
            f(&mut state);
            return;
        }
        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            // Bail if the index was dropped before the lock became available.
            let Some(state) = state.upgrade() else {
                return None;
            };
            let mut state = state.lock().await;
            Some(f(&mut state))
        })
        .detach();
    }

    /// Begins indexing `buffer`, arranging for cleanup when the buffer entity
    /// is released and reindexing when it is edited.
    fn register_buffer(&mut self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer_id = buffer.read(cx).remote_id();
        // On release, drop the buffer's declarations so file declarations for
        // the same project entry stop being shadowed.
        cx.observe_release(buffer, move |this, _buffer, cx| {
            this.with_state(cx, move |state| {
                if let Some(buffer_state) = state.buffers.remove(&buffer_id) {
                    SyntaxIndexState::remove_buffer_declarations(
                        &buffer_state.declarations,
                        &mut state.declarations,
                        &mut state.identifiers,
                    );
                }
            })
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();

        self.update_buffer(buffer.clone(), cx);
    }

    /// Reindexes the buffer whenever its contents change.
    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited => self.update_buffer(buffer, cx),
            _ => {}
        }
    }

    /// Parses `buffer_entity` and replaces its declarations in the index.
    ///
    /// Buffers without a project entry (e.g. untitled buffers) are skipped.
    fn update_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer = buffer_entity.read(cx);

        let Some(project_entry_id) =
            project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
        else {
            return;
        };
        let buffer_id = buffer.remote_id();

        // Wait for syntax parsing to settle before taking a snapshot.
        let mut parse_status = buffer.parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer_entity.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        // Extract declarations on the background executor. The rope is kept
        // alongside them so declaration text can be sliced later.
        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;
            let rope = snapshot.text.as_rope().clone();

            anyhow::Ok((
                declarations_in_buffer(&snapshot)
                    .into_iter()
                    .map(|item| {
                        (
                            item.parent_index,
                            BufferDeclaration::from_outline(item, &rope),
                        )
                    })
                    .collect::<Vec<_>>(),
                rope,
            ))
        });

        let task = cx.spawn({
            async move |this, cx| {
                let Ok((declarations, rope)) = parse_task.await else {
                    return;
                };

                this.update(cx, move |this, cx| {
                    this.with_state(cx, move |state| {
                        let buffer_state = state
                            .buffers
                            .entry(buffer_id)
                            .or_insert_with(Default::default);

                        // Remove the previous declarations before inserting
                        // the fresh set.
                        SyntaxIndexState::remove_buffer_declarations(
                            &buffer_state.declarations,
                            &mut state.declarations,
                            &mut state.identifiers,
                        );

                        let mut new_ids = Vec::with_capacity(declarations.len());
                        state.declarations.reserve(declarations.len());
                        for (parent_index, mut declaration) in declarations {
                            // Assumes parents precede their children in the
                            // outline order; `get(ix)` debug-panics otherwise.
                            declaration.parent = parent_index
                                .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                            let identifier = declaration.identifier.clone();
                            let declaration_id = state.declarations.insert(Declaration::Buffer {
                                rope: rope.clone(),
                                buffer_id,
                                declaration,
                                project_entry_id,
                            });
                            new_ids.push(declaration_id);

                            state
                                .identifiers
                                .entry(identifier)
                                .or_default()
                                .insert(declaration_id);
                        }

                        buffer_state.declarations = new_ids;
                    });
                })
                .ok();
            }
        });

        // Store the task so it stays alive; replacing it drops any in-flight
        // update for this buffer (NOTE(review): presumably dropping a gpui
        // Task cancels it — confirm).
        self.with_state(cx, move |state| {
            state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default)
                .task = Some(task)
        });
    }

    /// Loads and parses the on-disk file for `entry_id`, replacing its
    /// declarations in the index.
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) {
        let Some(project) = self.project.upgrade() else {
            return;
        };
        let project = project.read(cx);
        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return;
        };
        let language_registry = project.languages().clone();

        // Load the file into a temporary local buffer so the language's
        // syntax tree and outline queries can run on it.
        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language_registry
                    .language_for_file_path(&project_path.path)
                    .await
                    .log_err();

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(language, cx);
                    buffer
                })?;

                // Wait until parsing settles before taking a snapshot.
                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            })
        });

        // Declaration extraction happens on the background executor.
        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;
            let rope = snapshot.as_rope();
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
                .collect::<Vec<_>>();
            anyhow::Ok(declarations)
        });

        let task = cx.spawn({
            async move |this, cx| {
                // TODO: how to handle errors?
                let Ok(declarations) = parse_task.await else {
                    return;
                };
                this.update(cx, |this, cx| {
                    this.with_state(cx, move |state| {
                        let file_state =
                            state.files.entry(entry_id).or_insert_with(Default::default);

                        // Remove this file's previous declarations from both
                        // the slotmap and the identifier index.
                        for old_declaration_id in &file_state.declarations {
                            let Some(declaration) = state.declarations.remove(*old_declaration_id)
                            else {
                                debug_panic!("declaration not found");
                                continue;
                            };
                            if let Some(identifier_declarations) =
                                state.identifiers.get_mut(declaration.identifier())
                            {
                                identifier_declarations.remove(old_declaration_id);
                            }
                        }

                        let mut new_ids = Vec::with_capacity(declarations.len());
                        state.declarations.reserve(declarations.len());

                        for (parent_index, mut declaration) in declarations {
                            // Assumes parents precede their children in the
                            // outline order; `get(ix)` debug-panics otherwise.
                            declaration.parent = parent_index
                                .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                            let identifier = declaration.identifier.clone();
                            let declaration_id = state.declarations.insert(Declaration::File {
                                project_entry_id: entry_id,
                                declaration,
                            });
                            new_ids.push(declaration_id);

                            state
                                .identifiers
                                .entry(identifier)
                                .or_default()
                                .insert(declaration_id);
                        }

                        file_state.declarations = new_ids;
                    });
                })
                .ok();
            }
        });

        // Store the task so it stays alive; replacing it drops any in-flight
        // update for this entry (NOTE(review): presumably dropping a gpui
        // Task cancels it — confirm).
        self.with_state(cx, move |state| {
            state
                .files
                .entry(entry_id)
                .or_insert_with(Default::default)
                .task = Some(task);
        });
    }
}
423
424impl SyntaxIndexState {
425 pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
426 self.declarations.get(id)
427 }
428
429 pub fn declarations_for_identifier<const N: usize>(
430 &self,
431 identifier: &Identifier,
432 ) -> Vec<Declaration> {
433 // make sure to not have a large stack allocation
434 assert!(N < 32);
435
436 let Some(declaration_ids) = self.identifiers.get(&identifier) else {
437 return vec![];
438 };
439
440 let mut result = Vec::with_capacity(N);
441 let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
442 let mut file_declarations = Vec::new();
443
444 for declaration_id in declaration_ids {
445 let declaration = self.declarations.get(*declaration_id);
446 let Some(declaration) = some_or_debug_panic(declaration) else {
447 continue;
448 };
449 match declaration {
450 Declaration::Buffer {
451 project_entry_id, ..
452 } => {
453 included_buffer_entry_ids.push(*project_entry_id);
454 result.push(declaration.clone());
455 if result.len() == N {
456 return result;
457 }
458 }
459 Declaration::File {
460 project_entry_id, ..
461 } => {
462 if !included_buffer_entry_ids.contains(&project_entry_id) {
463 file_declarations.push(declaration.clone());
464 }
465 }
466 }
467 }
468
469 for declaration in file_declarations {
470 match declaration {
471 Declaration::File {
472 project_entry_id, ..
473 } => {
474 if !included_buffer_entry_ids.contains(&project_entry_id) {
475 result.push(declaration);
476
477 if result.len() == N {
478 return result;
479 }
480 }
481 }
482 Declaration::Buffer { .. } => {}
483 }
484 }
485
486 result
487 }
488
489 pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
490 match declaration {
491 Declaration::File {
492 project_entry_id, ..
493 } => self
494 .files
495 .get(project_entry_id)
496 .map(|file_state| file_state.declarations.len())
497 .unwrap_or_default(),
498 Declaration::Buffer { buffer_id, .. } => self
499 .buffers
500 .get(buffer_id)
501 .map(|buffer_state| buffer_state.declarations.len())
502 .unwrap_or_default(),
503 }
504 }
505
506 fn remove_buffer_declarations(
507 old_declaration_ids: &[DeclarationId],
508 declarations: &mut SlotMap<DeclarationId, Declaration>,
509 identifiers: &mut HashMap<Identifier, HashSet<DeclarationId>>,
510 ) {
511 for old_declaration_id in old_declaration_ids {
512 let Some(declaration) = declarations.remove(*old_declaration_id) else {
513 debug_panic!("declaration not found");
514 continue;
515 };
516 if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) {
517 identifier_declarations.remove(old_declaration_id);
518 }
519 }
520 }
521}
522
#[cfg(test)]
mod tests {
    use super::*;
    use std::{path::Path, sync::Arc};

    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project};
    use serde_json::json;
    use settings::SettingsStore;
    use text::OffsetRangeExt as _;
    use util::path;

    use crate::syntax_index::SyntaxIndex;

    // Files should be indexed as file declarations without ever being opened
    // in a buffer.
    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);

            // Offsets are byte ranges into the fixture files in init_test.
            let decl = expect_file_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range_in_file, 32..280);

            let decl = expect_file_decl("a.rs", &decls[1], &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range_in_file, 0..98);
        });
    }

    // A file declaration's `parent` should point at its enclosing declaration
    // (here: `test_process_data` inside `mod tests` of c.rs).
    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    // Same parent linkage, but for buffer declarations once the file is open.
    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        // Let the index observe the opened buffer and finish reparsing.
        cx.run_until_parked();

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    // The const-generic limit `N` should truncate the result set.
    // (Note: fn name has a typo — "limt" — kept as-is here.)
    #[gpui::test]
    async fn test_declarations_limt(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        let decls = index_state.declarations_for_identifier::<1>(&Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        });
        assert_eq!(decls.len(), 1);
    }

    // Opening a buffer shadows the file's declarations; releasing the buffer
    // restores them.
    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone());
        {
            let index_state = index_state_arc.lock().await;

            cx.update(|cx| {
                let decls = index_state.declarations_for_identifier::<8>(&main);
                assert_eq!(decls.len(), 2);
                // c.rs is open, so its `main` comes from the buffer.
                let decl = expect_buffer_decl("c.rs", &decls[0], &project, cx);
                assert_eq!(decl.identifier, main);
                assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..279);

                expect_file_decl("a.rs", &decls[1], &project, cx);
            });
        }

        // Drop the buffer and wait for release
        cx.update(|_| {
            drop(buffer);
        });
        cx.run_until_parked();

        let index_state = index_state_arc.lock().await;

        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);
            // Back to file declarations for both matches.
            expect_file_decl("c.rs", &decls[0], &project, cx);
            expect_file_decl("a.rs", &decls[1], &project, cx);
        });
    }

    // Asserts `declaration` is a buffer declaration for `path` and returns it.
    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            project_entry_id,
            ..
        } = declaration
        {
            let project_path = project
                .read(cx)
                .path_for_entry(*project_entry_id, cx)
                .unwrap();
            assert_eq!(project_path.path.as_ref(), Path::new(path),);
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    // Asserts `declaration` is a file declaration for `path` and returns it.
    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                Path::new(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    // Builds a fake project with three Rust fixture files, registers the Rust
    // language, and constructs an index over it. The byte offsets asserted in
    // the tests above depend on these exact file contents.
    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<SyntaxIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let index = cx.new(|cx| SyntaxIndex::new(&project, cx));
        // Wait for the initial scan of all worktree files to complete.
        cx.run_until_parked();

        (project, index, lang_id)
    }

    // Minimal Rust language definition with the outline query the index uses.
    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}