1use collections::{HashMap, HashSet};
2use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
3use language::{Buffer, BufferEvent, BufferSnapshot};
4use project::buffer_store::{BufferStore, BufferStoreEvent};
5use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
6use project::{PathChange, Project, ProjectEntryId, ProjectPath};
7use slotmap::SlotMap;
8use std::ops::Range;
9use std::sync::Arc;
10use text::Anchor;
11use util::{ResultExt as _, debug_panic, some_or_debug_panic};
12
13use crate::outline::{Identifier, OutlineDeclaration, declarations_in_buffer};
14
15// TODO:
16//
17// * Skip for remote projects
18
19// Potential future improvements:
20//
21// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which
22// references are present and their scores.
23
24// Potential future optimizations:
25//
26// * Cache of buffers for files
27//
28// * Parse files directly instead of loading into a Rope. Make SyntaxMap generic to handle embedded
29// languages? Will also need to find line boundaries, but that can be done by scanning characters in
30// the flat representation.
31//
32// * Use something similar to slotmap without key versions.
33//
34// * Concurrent slotmap
35//
36// * Use queue for parsing
37
// Key type for declarations stored in `TreeSitterIndex::declarations`.
slotmap::new_key_type! {
    pub struct DeclarationId;
}
41
/// Index of declarations across a project, extracted via tree-sitter outline
/// queries. Declarations come from two sources: files on disk and open
/// buffers; lookups prefer buffer declarations over the file they shadow.
pub struct TreeSitterIndex {
    /// All live declarations, keyed by slotmap id.
    declarations: SlotMap<DeclarationId, Declaration>,
    /// Identifier -> ids of all declarations with that name.
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    /// Per on-disk file state, keyed by project entry id.
    files: HashMap<ProjectEntryId, FileState>,
    /// Per open-buffer state, keyed by weak buffer handle.
    buffers: HashMap<WeakEntity<Buffer>, BufferState>,
    project: WeakEntity<Project>,
}
49
/// Declarations parsed from a file on disk, plus the in-flight update task.
#[derive(Debug, Default)]
struct FileState {
    // Ids of this file's declarations in `TreeSitterIndex::declarations`.
    declarations: Vec<DeclarationId>,
    // Most recent update task; replacing it cancels any pending re-parse.
    task: Option<Task<()>>,
}
55
/// Declarations parsed from an open buffer, plus the in-flight update task.
#[derive(Default)]
struct BufferState {
    // Ids of this buffer's declarations in `TreeSitterIndex::declarations`.
    declarations: Vec<DeclarationId>,
    // Most recent update task; replacing it cancels any pending re-parse.
    task: Option<Task<()>>,
}
61
/// A single indexed declaration, from either an on-disk file (text captured
/// eagerly at parse time) or an open buffer (tracked via anchors that follow
/// edits).
#[derive(Debug, Clone)]
pub enum Declaration {
    File {
        project_entry_id: ProjectEntryId,
        declaration: FileDeclaration,
    },
    Buffer {
        buffer: WeakEntity<Buffer>,
        declaration: BufferDeclaration,
    },
}
73
74impl Declaration {
75 fn identifier(&self) -> &Identifier {
76 match self {
77 Declaration::File { declaration, .. } => &declaration.identifier,
78 Declaration::Buffer { declaration, .. } => &declaration.identifier,
79 }
80 }
81
82 pub fn project_entry_id(&self, cx: &App) -> Option<ProjectEntryId> {
83 match self {
84 Declaration::File {
85 project_entry_id, ..
86 } => Some(*project_entry_id),
87 Declaration::Buffer { buffer, .. } => buffer
88 .read_with(cx, |buffer, _cx| {
89 project::File::from_dyn(buffer.file())
90 .and_then(|file| file.project_entry_id(cx))
91 })
92 .ok()
93 .flatten(),
94 }
95 }
96
97 // todo! pick best return type
98 pub fn item_text(&self, cx: &App) -> Arc<str> {
99 match self {
100 Declaration::File { declaration, .. } => declaration.declaration_text.clone(),
101 Declaration::Buffer {
102 buffer,
103 declaration,
104 } => buffer
105 .read_with(cx, |buffer, _cx| {
106 buffer
107 .text_for_range(declaration.item_range.clone())
108 .collect::<String>()
109 .into()
110 })
111 .unwrap_or_default(),
112 }
113 }
114
115 // todo! pick best return type
116 pub fn signature_text(&self, cx: &App) -> Arc<str> {
117 match self {
118 Declaration::File { declaration, .. } => declaration.signature_text.clone(),
119 Declaration::Buffer {
120 buffer,
121 declaration,
122 } => buffer
123 .read_with(cx, |buffer, _cx| {
124 buffer
125 .text_for_range(declaration.signature_range.clone())
126 .collect::<String>()
127 .into()
128 })
129 .unwrap_or_default(),
130 }
131 }
132}
133
/// Declaration extracted from an on-disk file. Ranges are byte offsets into
/// the file's text as it was at parse time; the corresponding text is captured
/// eagerly since no buffer stays alive to read from later.
#[derive(Debug, Clone)]
pub struct FileDeclaration {
    /// Enclosing declaration within the same file, if any.
    pub parent: Option<DeclarationId>,
    pub identifier: Identifier,
    /// Byte range of the whole declaration item.
    pub item_range: Range<usize>,
    /// Byte range of just the declaration's signature.
    pub signature_range: Range<usize>,
    // todo! should we just store a range with the declaration text?
    pub signature_text: Arc<str>,
    pub declaration_text: Arc<str>,
}
144
/// Declaration tracked in an open buffer. Ranges are anchors, so they follow
/// the text as the buffer is edited; text is read from the buffer on demand.
#[derive(Debug, Clone)]
pub struct BufferDeclaration {
    /// Enclosing declaration within the same buffer, if any.
    pub parent: Option<DeclarationId>,
    pub identifier: Identifier,
    /// Anchor range of the whole declaration item.
    pub item_range: Range<Anchor>,
    /// Anchor range of just the declaration's signature.
    pub signature_range: Range<Anchor>,
}
152
impl TreeSitterIndex {
    /// Creates an index for `project`: kicks off parsing of every file in each
    /// worktree, registers all currently-open buffers, and subscribes to
    /// worktree- and buffer-store events to keep the index up to date.
    pub fn new(project: &Entity<Project>, cx: &mut Context<Self>) -> Self {
        let mut this = Self {
            declarations: SlotMap::with_key(),
            identifiers: HashMap::default(),
            project: project.downgrade(),
            files: HashMap::default(),
            buffers: HashMap::default(),
        };

        let worktree_store = project.read(cx).worktree_store();
        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        // Snapshots are collected up front so the worktree store isn't
        // borrowed while `update_file` needs `cx` mutably.
        for worktree in worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>()
        {
            // files(false, 0): presumably file entries from index 0, excluding
            // ignored files — TODO confirm against Worktree::files docs.
            for entry in worktree.files(false, 0) {
                this.update_file(
                    entry.id,
                    ProjectPath {
                        worktree_id: worktree.id(),
                        path: entry.path.clone(),
                    },
                    cx,
                );
            }
        }

        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }

    /// Looks up a declaration by id; `None` if it has been removed.
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }

    /// Returns up to `N` declarations of `identifier`.
    ///
    /// Buffer declarations are preferred over file declarations: a file
    /// declaration is only included when no buffer declaration with the same
    /// project entry id was found, so open buffers shadow their on-disk copy.
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
        cx: &App,
    ) -> Vec<Declaration> {
        // make sure to not have a large stack allocation
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(&identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        // Entry ids of buffers already pushed into `result`. At most N pushes
        // happen before the early return below, so this cannot overflow.
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        let mut file_declarations = Vec::new();

        // First pass: take buffer declarations immediately; defer file
        // declarations so shadowing can be decided once all buffers are known.
        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer { buffer, .. } => {
                    if let Ok(Some(entry_id)) = buffer.read_with(cx, |buffer, cx| {
                        project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
                    }) {
                        included_buffer_entry_ids.push(entry_id);
                        result.push(declaration.clone());
                        if result.len() == N {
                            return result;
                        }
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    // Early filter only; re-checked below because a shadowing
                    // buffer may appear later in this same loop.
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        file_declarations.push(declaration.clone());
                    }
                }
            }
        }

        // Second pass: add file declarations not shadowed by any buffer.
        for declaration in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        result.push(declaration);

                        if result.len() == N {
                            return result;
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }

    /// Number of declarations tracked for the file or buffer `declaration`
    /// came from; 0 if that source is no longer tracked.
    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
        match declaration {
            Declaration::File {
                project_entry_id, ..
            } => self
                .files
                .get(project_entry_id)
                .map(|file_state| file_state.declarations.len())
                .unwrap_or_default(),
            Declaration::Buffer { buffer, .. } => self
                .buffers
                .get(buffer)
                .map(|buffer_state| buffer_state.declarations.len())
                .unwrap_or_default(),
        }
    }

    /// Keeps `files` in sync with worktree changes: re-parses created/updated
    /// entries and drops state for removed ones.
    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                for (path, entry_id, path_change) in updated_entries_set.iter() {
                    if let PathChange::Removed = path_change {
                        // NOTE(review): this drops the FileState but leaves its
                        // declarations in `declarations`/`identifiers`, which
                        // looks like a stale-entry leak — confirm intended.
                        self.files.remove(entry_id);
                    } else {
                        let project_path = ProjectPath {
                            worktree_id: *worktree_id,
                            path: path.clone(),
                        };
                        self.update_file(*entry_id, project_path, cx);
                    }
                }
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                // TODO: Is this needed?
                // NOTE(review): same stale-declaration concern as the Removed
                // branch above.
                self.files.remove(project_entry_id);
            }
            _ => {}
        }
    }

    /// Starts indexing newly added buffers; all other buffer-store events are
    /// intentionally ignored.
    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

    /// Tracks `buffer`: parses it now, re-parses on edit, and drops its state
    /// when the buffer entity is released.
    fn register_buffer(&mut self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffers
            .insert(buffer.downgrade(), BufferState::default());
        let weak_buf = buffer.downgrade();
        cx.observe_release(buffer, move |this, _buffer, _cx| {
            // NOTE(review): the released buffer's declarations stay behind in
            // `declarations`/`identifiers`. Lookups skip them because the weak
            // handle no longer upgrades, but they are never freed — confirm.
            this.buffers.remove(&weak_buf);
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();
        self.update_buffer(buffer.clone(), cx);
    }

    /// Re-parses a buffer whenever its contents are edited.
    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited => self.update_buffer(buffer, cx),
            _ => {}
        }
    }

    /// Replaces `buffer`'s declarations in the index.
    ///
    /// Pipeline: wait until tree-sitter parsing is idle, snapshot the buffer,
    /// extract declarations on a background thread, then swap the old
    /// declaration set for the new one on the main thread. Storing the final
    /// task in `BufferState.task` drops (cancels) any previous in-flight
    /// update for the same buffer.
    fn update_buffer(&mut self, buffer: Entity<Buffer>, cx: &Context<Self>) {
        let mut parse_status = buffer.read(cx).parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;

            anyhow::Ok(
                declarations_in_buffer(&snapshot)
                    .into_iter()
                    .map(|item| {
                        (
                            item.parent_index,
                            BufferDeclaration::from_outline(item, &snapshot),
                        )
                    })
                    .collect::<Vec<_>>(),
            )
        });

        let task = cx.spawn({
            let weak_buffer = buffer.downgrade();
            async move |this, cx| {
                let Ok(declarations) = parse_task.await else {
                    return;
                };

                this.update(cx, |this, _cx| {
                    let buffer_state = this
                        .buffers
                        .entry(weak_buffer.clone())
                        .or_insert_with(Default::default);

                    // Unlink this buffer's old declarations from both maps.
                    for old_declaration_id in &buffer_state.declarations {
                        let Some(declaration) = this.declarations.remove(*old_declaration_id)
                        else {
                            debug_panic!("declaration not found");
                            continue;
                        };
                        if let Some(identifier_declarations) =
                            this.identifiers.get_mut(declaration.identifier())
                        {
                            identifier_declarations.remove(old_declaration_id);
                        }
                    }

                    let mut new_ids = Vec::with_capacity(declarations.len());
                    this.declarations.reserve(declarations.len());
                    for (parent_index, mut declaration) in declarations {
                        // `parent_index` points at an earlier outline item, so
                        // its id is already in `new_ids`.
                        declaration.parent = parent_index
                            .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                        let identifier = declaration.identifier.clone();
                        let declaration_id = this.declarations.insert(Declaration::Buffer {
                            buffer: weak_buffer.clone(),
                            declaration,
                        });
                        new_ids.push(declaration_id);

                        this.identifiers
                            .entry(identifier)
                            .or_default()
                            .insert(declaration_id);
                    }

                    buffer_state.declarations = new_ids;
                })
                .ok();
            }
        });

        self.buffers
            .entry(buffer.downgrade())
            .or_insert_with(Default::default)
            .task = Some(task);
    }

    /// Replaces the declarations indexed for the file at `project_path`.
    ///
    /// Loads the file into a temporary local buffer (reusing buffer parsing),
    /// waits for parsing, extracts `FileDeclaration`s on a background thread,
    /// then swaps the file's declaration set on the main thread. Storing the
    /// task in `FileState.task` cancels any previous update for this entry.
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) {
        let Some(project) = self.project.upgrade() else {
            return;
        };
        let project = project.read(cx);
        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return;
        };
        let language_registry = project.languages().clone();

        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language_registry
                    .language_for_file_path(&project_path.path)
                    .await
                    .log_err();

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(language, cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            })
        });

        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| {
                    (
                        item.parent_index,
                        FileDeclaration::from_outline(item, &snapshot),
                    )
                })
                .collect::<Vec<_>>();
            anyhow::Ok(declarations)
        });

        let task = cx.spawn({
            async move |this, cx| {
                // TODO: how to handle errors?
                let Ok(declarations) = parse_task.await else {
                    return;
                };
                this.update(cx, |this, _cx| {
                    let file_state = this.files.entry(entry_id).or_insert_with(Default::default);

                    // Unlink this file's old declarations from both maps.
                    for old_declaration_id in &file_state.declarations {
                        let Some(declaration) = this.declarations.remove(*old_declaration_id)
                        else {
                            debug_panic!("declaration not found");
                            continue;
                        };
                        if let Some(identifier_declarations) =
                            this.identifiers.get_mut(declaration.identifier())
                        {
                            identifier_declarations.remove(old_declaration_id);
                        }
                    }

                    let mut new_ids = Vec::with_capacity(declarations.len());
                    this.declarations.reserve(declarations.len());

                    for (parent_index, mut declaration) in declarations {
                        // `parent_index` points at an earlier outline item, so
                        // its id is already in `new_ids`.
                        declaration.parent = parent_index
                            .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                        let identifier = declaration.identifier.clone();
                        let declaration_id = this.declarations.insert(Declaration::File {
                            project_entry_id: entry_id,
                            declaration,
                        });
                        new_ids.push(declaration_id);

                        this.identifiers
                            .entry(identifier)
                            .or_default()
                            .insert(declaration_id);
                    }

                    file_state.declarations = new_ids;
                })
                .ok();
            }
        });

        self.files
            .entry(entry_id)
            .or_insert_with(Default::default)
            .task = Some(task);
    }
}
540
541impl BufferDeclaration {
542 pub fn from_outline(declaration: OutlineDeclaration, snapshot: &BufferSnapshot) -> Self {
543 // use of anchor_before is a guess that the proper behavior is to expand to include
544 // insertions immediately before the declaration, but not for insertions immediately after
545 Self {
546 parent: None,
547 identifier: declaration.identifier,
548 item_range: snapshot.anchor_before(declaration.item_range.start)
549 ..snapshot.anchor_before(declaration.item_range.end),
550 signature_range: snapshot.anchor_before(declaration.signature_range.start)
551 ..snapshot.anchor_before(declaration.signature_range.end),
552 }
553 }
554}
555
556impl FileDeclaration {
557 pub fn from_outline(
558 declaration: OutlineDeclaration,
559 snapshot: &BufferSnapshot,
560 ) -> FileDeclaration {
561 FileDeclaration {
562 parent: None,
563 identifier: declaration.identifier,
564 signature_text: snapshot
565 .text_for_range(declaration.signature_range.clone())
566 .collect::<String>()
567 .into(),
568 signature_range: declaration.signature_range,
569 declaration_text: snapshot
570 .text_for_range(declaration.item_range.clone())
571 .collect::<String>()
572 .into(),
573 item_range: declaration.item_range,
574 }
575 }
576}
577
#[cfg(test)]
mod tests {
    use super::*;
    use std::{path::Path, sync::Arc};

    use futures::channel::oneshot;
    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project, ProjectItem};
    use serde_json::json;
    use settings::SettingsStore;
    use text::OffsetRangeExt as _;
    use util::path;

    use crate::tree_sitter_index::TreeSitterIndex;

    /// Files that were never opened in a buffer are indexed from disk.
    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(&main, cx);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range, 32..279);

            let decl = expect_file_decl("a.rs", &decls[1], &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range, 0..97);
        });
    }

    /// File declarations record their enclosing declaration as `parent`.
    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(&test_process_data, cx);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    /// Buffer declarations record their enclosing declaration as `parent`.
    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(&test_process_data, cx);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0], cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", parent, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    /// `declarations_for_identifier::<N>` returns at most N declarations.
    #[gpui::test]
    async fn test_declarations_limit(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<1>(
                &Identifier {
                    name: "main".into(),
                    language_id: rust_lang_id,
                },
                cx,
            );
            assert_eq!(decls.len(), 1);
        });
    }

    /// An open buffer shadows the file-based declarations for the same entry,
    /// and the file declarations reappear once the buffer is released.
    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(&main, cx);
            assert_eq!(decls.len(), 2);
            let decl = expect_buffer_decl("c.rs", &decls[0], cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..279);

            expect_file_decl("a.rs", &decls[1], &project, cx);
        });

        // Drop the buffer and wait for release
        let (release_tx, release_rx) = oneshot::channel();
        cx.update(|cx| {
            cx.observe_release(&buffer, |_, _| {
                release_tx.send(()).ok();
            })
            .detach();
        });
        drop(buffer);
        cx.run_until_parked();
        release_rx.await.ok();
        cx.run_until_parked();

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(&main, cx);
            assert_eq!(decls.len(), 2);
            expect_file_decl("c.rs", &decls[0], &project, cx);
            expect_file_decl("a.rs", &decls[1], &project, cx);
        });
    }

    /// Asserts `declaration` is a buffer declaration whose buffer is at `path`,
    /// returning the inner `BufferDeclaration`.
    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            buffer,
        } = declaration
        {
            assert_eq!(
                buffer
                    .upgrade()
                    .unwrap()
                    .read(cx)
                    .project_path(cx)
                    .unwrap()
                    .path
                    .as_ref(),
                Path::new(path),
            );
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    /// Asserts `declaration` is a file declaration whose entry resolves to
    /// `path`, returning the inner `FileDeclaration`.
    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                Path::new(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    /// Builds a project over a fake fs with three Rust files, registers the
    /// Rust language, creates the index, and waits for initial indexing.
    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<TreeSitterIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let index = cx.new(|cx| TreeSitterIndex::new(&project, cx));
        cx.run_until_parked();

        (project, index, lang_id)
    }

    /// A minimal Rust language definition with the outline query the index
    /// relies on.
    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}