use std::sync::Arc;

use collections::{HashMap, HashSet};
use futures::lock::Mutex;
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
use language::{Buffer, BufferEvent};
use project::buffer_store::{BufferStore, BufferStoreEvent};
use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use project::{PathChange, Project, ProjectEntryId, ProjectPath};
use slotmap::SlotMap;
use text::BufferId;
use util::{debug_panic, some_or_debug_panic};

use crate::declaration::{
    BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
};
use crate::outline::declarations_in_buffer;

// TODO:
//
// * Skip for remote projects
//
// * Consider making SyntaxIndex not an Entity.

// Potential future improvements:
//
// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which
// references are present and their scores.

// Potential future optimizations:
//
// * Cache of buffers for files
//
// * Parse files directly instead of loading into a Rope. Make SyntaxMap generic to handle embedded
// languages? Will also need to find line boundaries, but that can be done by scanning characters in
// the flat representation.
//
// * Use something similar to slotmap without key versions.
//
// * Concurrent slotmap
//
// * Use queue for parsing

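/// Maintains an index of the declarations found in project files and open buffers, looked up by
/// identifier. The state lives behind an async mutex so background parse tasks can update it.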
pub struct SyntaxIndex {
    state: Arc<Mutex<SyntaxIndexState>>,
    project: WeakEntity<Project>,
}

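/// The indexed data: all known declarations, the identifiers that map to them, and per-file /
/// per-buffer bookkeeping used to replace declarations when content changes.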
50#[derive(Default)]
51pub struct SyntaxIndexState {
52 declarations: SlotMap<DeclarationId, Declaration>,
53 identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
54 files: HashMap<ProjectEntryId, FileState>,
55 buffers: HashMap<BufferId, BufferState>,
56}
57
58#[derive(Debug, Default)]
59struct FileState {
60 declarations: Vec<DeclarationId>,
61 task: Option<Task<()>>,
62}
63
64#[derive(Default)]
65struct BufferState {
66 declarations: Vec<DeclarationId>,
67 task: Option<Task<()>>,
68}
69
70impl SyntaxIndex {
71 pub fn new(project: &Entity<Project>, cx: &mut Context<Self>) -> Self {
72 let mut this = Self {
73 project: project.downgrade(),
74 state: Arc::new(Mutex::new(SyntaxIndexState::default())),
75 };
76
77 let worktree_store = project.read(cx).worktree_store();
78 cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
79 .detach();
80
81 for worktree in worktree_store
82 .read(cx)
83 .worktrees()
84 .map(|w| w.read(cx).snapshot())
85 .collect::<Vec<_>>()
86 {
87 for entry in worktree.files(false, 0) {
88 this.update_file(
89 entry.id,
90 ProjectPath {
91 worktree_id: worktree.id(),
92 path: entry.path.clone(),
93 },
94 cx,
95 );
96 }
97 }
98
99 let buffer_store = project.read(cx).buffer_store().clone();
100 for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
101 this.register_buffer(&buffer, cx);
102 }
103 cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
104 .detach();
105
106 this
107 }
108
109 fn handle_worktree_store_event(
110 &mut self,
111 _worktree_store: Entity<WorktreeStore>,
112 event: &WorktreeStoreEvent,
113 cx: &mut Context<Self>,
114 ) {
115 use WorktreeStoreEvent::*;
116 match event {
117 WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
118 let state = Arc::downgrade(&self.state);
119 let worktree_id = *worktree_id;
120 let updated_entries_set = updated_entries_set.clone();
121 cx.spawn(async move |this, cx| {
122 let Some(state) = state.upgrade() else { return };
123 for (path, entry_id, path_change) in updated_entries_set.iter() {
124 if let PathChange::Removed = path_change {
125 state.lock().await.files.remove(entry_id);
126 } else {
127 let project_path = ProjectPath {
128 worktree_id,
129 path: path.clone(),
130 };
131 this.update(cx, |this, cx| {
132 this.update_file(*entry_id, project_path, cx);
133 })
134 .ok();
135 }
136 }
137 })
138 .detach();
139 }
140 WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
141 let project_entry_id = *project_entry_id;
142 self.with_state(cx, move |state| {
143 state.files.remove(&project_entry_id);
144 })
145 }
146 _ => {}
147 }
148 }
149
150 fn handle_buffer_store_event(
151 &mut self,
152 _buffer_store: Entity<BufferStore>,
153 event: &BufferStoreEvent,
154 cx: &mut Context<Self>,
155 ) {
156 use BufferStoreEvent::*;
157 match event {
158 BufferAdded(buffer) => self.register_buffer(buffer, cx),
159 BufferOpened { .. }
160 | BufferChangedFilePath { .. }
161 | BufferDropped { .. }
162 | SharedBufferClosed { .. } => {}
163 }
164 }
165
166 pub fn state(&self) -> &Arc<Mutex<SyntaxIndexState>> {
167 &self.state
168 }
169
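    /// Applies `f` to the shared state: synchronously if the lock is free, otherwise on a
    /// detached background task.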
    fn with_state(&self, cx: &mut App, f: impl FnOnce(&mut SyntaxIndexState) + Send + 'static) {
        if let Some(mut state) = self.state.try_lock() {
            f(&mut state);
            return;
        }
        let state = Arc::downgrade(&self.state);
        cx.background_spawn(async move {
            let Some(state) = state.upgrade() else {
                return;
            };
            let mut state = state.lock().await;
            f(&mut state)
        })
        .detach();
    }

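    /// Starts tracking an open buffer: indexes its declarations now, re-indexes on edits, and
    /// removes its declarations when the buffer is released.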
    fn register_buffer(&mut self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer_id = buffer.read(cx).remote_id();
        cx.observe_release(buffer, move |this, _buffer, cx| {
            this.with_state(cx, move |state| {
                if let Some(buffer_state) = state.buffers.remove(&buffer_id) {
                    SyntaxIndexState::remove_buffer_declarations(
                        &buffer_state.declarations,
                        &mut state.declarations,
                        &mut state.identifiers,
                    );
                }
            })
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();

        self.update_buffer(buffer.clone(), cx);
    }

    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited => self.update_buffer(buffer, cx),
            _ => {}
        }
    }

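    /// Re-indexes a buffer: waits for parsing to settle, extracts declarations from the outline
    /// on a background task, then swaps out the buffer's entries in the index.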
    fn update_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) {
        let buffer = buffer_entity.read(cx);

        let Some(project_entry_id) =
            project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
        else {
            return;
        };
        let buffer_id = buffer.remote_id();

        let mut parse_status = buffer.parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer_entity.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;
            let rope = snapshot.text.as_rope().clone();

            anyhow::Ok((
                declarations_in_buffer(&snapshot)
                    .into_iter()
                    .map(|item| {
                        (
                            item.parent_index,
                            BufferDeclaration::from_outline(item, &rope),
                        )
                    })
                    .collect::<Vec<_>>(),
                rope,
            ))
        });

        let task = cx.spawn({
            async move |this, cx| {
                let Ok((declarations, rope)) = parse_task.await else {
                    return;
                };

                this.update(cx, move |this, cx| {
                    this.with_state(cx, move |state| {
                        let buffer_state = state
                            .buffers
                            .entry(buffer_id)
                            .or_insert_with(Default::default);

                        SyntaxIndexState::remove_buffer_declarations(
                            &buffer_state.declarations,
                            &mut state.declarations,
                            &mut state.identifiers,
                        );

                        let mut new_ids = Vec::with_capacity(declarations.len());
                        state.declarations.reserve(declarations.len());
                        for (parent_index, mut declaration) in declarations {
                            declaration.parent = parent_index
                                .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                            let identifier = declaration.identifier.clone();
                            let declaration_id = state.declarations.insert(Declaration::Buffer {
                                rope: rope.clone(),
                                buffer_id,
                                declaration,
                                project_entry_id,
                            });
                            new_ids.push(declaration_id);

                            state
                                .identifiers
                                .entry(identifier)
                                .or_default()
                                .insert(declaration_id);
                        }

                        buffer_state.declarations = new_ids;
                    });
                })
                .ok();
            }
        });

        self.with_state(cx, move |state| {
            state
                .buffers
                .entry(buffer_id)
                .or_insert_with(Default::default)
                .task = Some(task)
        });
    }

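    /// Indexes a file from disk: loads it into a temporary local buffer, waits for parsing, and
    /// replaces the file's declarations in the index.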
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) {
        let Some(project) = self.project.upgrade() else {
            return;
        };
        let project = project.read(cx);
        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return;
        };
        let language_registry = project.languages().clone();

        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language_registry
                    .language_for_file_path(&project_path.path)
                    .await
                    .ok();

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(language, cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            })
        });

        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;
            let rope = snapshot.as_rope();
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| (item.parent_index, FileDeclaration::from_outline(item, rope)))
                .collect::<Vec<_>>();
            anyhow::Ok(declarations)
        });

        let task = cx.spawn({
            async move |this, cx| {
                // TODO: how to handle errors?
                let Ok(declarations) = parse_task.await else {
                    return;
                };
                this.update(cx, |this, cx| {
                    this.with_state(cx, move |state| {
                        let file_state =
                            state.files.entry(entry_id).or_insert_with(Default::default);

                        for old_declaration_id in &file_state.declarations {
                            let Some(declaration) = state.declarations.remove(*old_declaration_id)
                            else {
                                debug_panic!("declaration not found");
                                continue;
                            };
                            if let Some(identifier_declarations) =
                                state.identifiers.get_mut(declaration.identifier())
                            {
                                identifier_declarations.remove(old_declaration_id);
                            }
                        }

                        let mut new_ids = Vec::with_capacity(declarations.len());
                        state.declarations.reserve(declarations.len());

                        for (parent_index, mut declaration) in declarations {
                            declaration.parent = parent_index
                                .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                            let identifier = declaration.identifier.clone();
                            let declaration_id = state.declarations.insert(Declaration::File {
                                project_entry_id: entry_id,
                                declaration,
                            });
                            new_ids.push(declaration_id);

                            state
                                .identifiers
                                .entry(identifier)
                                .or_default()
                                .insert(declaration_id);
                        }

                        file_state.declarations = new_ids;
                    });
                })
                .ok();
            }
        });

        self.with_state(cx, move |state| {
            state
                .files
                .entry(entry_id)
                .or_insert_with(Default::default)
                .task = Some(task);
        });
    }
}

impl SyntaxIndexState {
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }

    /// Returns declarations for the identifier. If the limit is exceeded, returns an empty vector.
    ///
    /// TODO: Consider doing some pre-ranking and instead truncating when N is exceeded.
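    ///
    /// A minimal usage sketch (marked `ignore` rather than run as a doctest; assumes an
    /// already-populated `index_state: &SyntaxIndexState` and a `language_id` in scope):
    ///
    /// ```ignore
    /// let decls = index_state.declarations_for_identifier::<8>(&Identifier {
    ///     name: "main".into(),
    ///     language_id,
    /// });
    /// // Hitting the limit of 8 candidates yields an empty result.
    /// assert!(decls.len() < 8);
    /// ```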
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
    ) -> Vec<Declaration> {
        // make sure to not have a large stack allocation
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer {
                    project_entry_id, ..
                } => {
                    included_buffer_entry_ids.push(*project_entry_id);
                    result.push(declaration.clone());
                    if result.len() == N {
                        return Vec::new();
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        file_declarations.push(declaration.clone());
                    }
                }
            }
        }

        for declaration in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        result.push(declaration);

                        if result.len() == N {
                            return Vec::new();
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }

    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
        match declaration {
            Declaration::File {
                project_entry_id, ..
            } => self
                .files
                .get(project_entry_id)
                .map(|file_state| file_state.declarations.len())
                .unwrap_or_default(),
            Declaration::Buffer { buffer_id, .. } => self
                .buffers
                .get(buffer_id)
                .map(|buffer_state| buffer_state.declarations.len())
                .unwrap_or_default(),
        }
    }

    fn remove_buffer_declarations(
        old_declaration_ids: &[DeclarationId],
        declarations: &mut SlotMap<DeclarationId, Declaration>,
        identifiers: &mut HashMap<Identifier, HashSet<DeclarationId>>,
    ) {
        for old_declaration_id in old_declaration_ids {
            let Some(declaration) = declarations.remove(*old_declaration_id) else {
                debug_panic!("declaration not found");
                continue;
            };
            if let Some(identifier_declarations) = identifiers.get_mut(declaration.identifier()) {
                identifier_declarations.remove(old_declaration_id);
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::{path::Path, sync::Arc};

    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project};
    use serde_json::json;
    use settings::SettingsStore;
    use text::OffsetRangeExt as _;
    use util::path;

    use crate::syntax_index::SyntaxIndex;

    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range_in_file, 32..280);

            let decl = expect_file_decl("a.rs", &decls[1], &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range_in_file, 0..98);
        });
    }

    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&test_process_data);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index_state.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    #[gpui::test]
    async fn test_declarations_limit(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        let index_state = index.read_with(cx, |index, _cx| index.state().clone());
        let index_state = index_state.lock().await;
        let decls = index_state.declarations_for_identifier::<1>(&Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        });
        assert_eq!(decls.len(), 0);
    }

    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        let index_state_arc = index.read_with(cx, |index, _cx| index.state().clone());
        {
            let index_state = index_state_arc.lock().await;

            cx.update(|cx| {
                let decls = index_state.declarations_for_identifier::<8>(&main);
                assert_eq!(decls.len(), 2);
                let decl = expect_buffer_decl("c.rs", &decls[0], &project, cx);
                assert_eq!(decl.identifier, main);
                assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..280);

                expect_file_decl("a.rs", &decls[1], &project, cx);
            });
        }

        // Drop the buffer and wait for release
        cx.update(|_| {
            drop(buffer);
        });
        cx.run_until_parked();

        let index_state = index_state_arc.lock().await;

        cx.update(|cx| {
            let decls = index_state.declarations_for_identifier::<8>(&main);
            assert_eq!(decls.len(), 2);
            expect_file_decl("c.rs", &decls[0], &project, cx);
            expect_file_decl("a.rs", &decls[1], &project, cx);
        });
    }

    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            project_entry_id,
            ..
        } = declaration
        {
            let project_path = project
                .read(cx)
                .path_for_entry(*project_entry_id, cx)
                .unwrap();
            assert_eq!(project_path.path.as_ref(), Path::new(path));
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                Path::new(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<SyntaxIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let index = cx.new(|cx| SyntaxIndex::new(&project, cx));
        cx.run_until_parked();

        (project, index, lang_id)
    }

    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}