use collections::{HashMap, HashSet};
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
use language::{Buffer, BufferEvent, BufferSnapshot};
use project::buffer_store::{BufferStore, BufferStoreEvent};
use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
use project::{PathChange, Project, ProjectEntryId, ProjectPath};
use slotmap::SlotMap;
use std::ops::Range;
use std::sync::Arc;
use text::Anchor;
use util::{ResultExt as _, debug_panic, some_or_debug_panic};

use crate::outline::{Identifier, OutlineDeclaration, declarations_in_buffer};

// TODO:
//
// * Skip for remote projects

// Potential future improvements:
//
// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which
// references are present and their scores.

// Potential future optimizations:
//
// * Cache of buffers for files
//
// * Parse files directly instead of loading into a Rope. Make SyntaxMap generic to handle embedded
// languages? Will also need to find line boundaries, but that can be done by scanning characters in
// the flat representation.
//
// * Use something similar to slotmap without key versions.
//
// * Concurrent slotmap
//
// * Use queue for parsing

slotmap::new_key_type! {
    pub struct DeclarationId;
}

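/// An index of declarations across a project, built from tree-sitter outline queries.
///
/// Declarations are tracked for both on-disk worktree files and open buffers, keyed by
/// identifier. When looking up an identifier, declarations from open buffers shadow the
/// declarations of their corresponding on-disk files.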
pub struct TreeSitterIndex {
    declarations: SlotMap<DeclarationId, Declaration>,
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    files: HashMap<ProjectEntryId, FileState>,
    buffers: HashMap<WeakEntity<Buffer>, BufferState>,
    project: WeakEntity<Project>,
}

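/// Declarations indexed for a file on disk, plus the task that is currently (re)indexing it.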
#[derive(Debug, Default)]
struct FileState {
    declarations: Vec<DeclarationId>,
    task: Option<Task<()>>,
}

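/// Declarations indexed for an open buffer, plus the task that is currently (re)indexing it.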
#[derive(Default)]
struct BufferState {
    declarations: Vec<DeclarationId>,
    task: Option<Task<()>>,
}

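/// A declaration tracked by the index, found either in a file on disk or in an open buffer.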
#[derive(Debug, Clone)]
pub enum Declaration {
    File {
        project_entry_id: ProjectEntryId,
        declaration: FileDeclaration,
    },
    Buffer {
        buffer: WeakEntity<Buffer>,
        declaration: BufferDeclaration,
    },
}

impl Declaration {
    fn identifier(&self) -> &Identifier {
        match self {
            Declaration::File { declaration, .. } => &declaration.identifier,
            Declaration::Buffer { declaration, .. } => &declaration.identifier,
        }
    }
}

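/// Declaration parsed from a file on disk. Ranges are byte offsets into the file's text at the
/// time it was indexed, and the signature text is captured eagerly because the temporary buffer
/// used for parsing is discarded.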
#[derive(Debug, Clone)]
pub struct FileDeclaration {
    pub parent: Option<DeclarationId>,
    pub identifier: Identifier,
    pub item_range: Range<usize>,
    pub signature_range: Range<usize>,
    pub signature_text: Arc<str>,
}

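/// Declaration parsed from an open buffer. Ranges are anchors so that they remain valid as the
/// buffer is edited.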
#[derive(Debug, Clone)]
pub struct BufferDeclaration {
    pub parent: Option<DeclarationId>,
    pub identifier: Identifier,
    pub item_range: Range<Anchor>,
    pub signature_range: Range<Anchor>,
}

impl TreeSitterIndex {
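    /// Creates an index for the project, indexes all existing worktree files and open buffers,
    /// and subscribes to worktree and buffer store events to keep the index up to date.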
    pub fn new(project: &Entity<Project>, cx: &mut Context<Self>) -> Self {
        let mut this = Self {
            declarations: SlotMap::with_key(),
            identifiers: HashMap::default(),
            project: project.downgrade(),
            files: HashMap::default(),
            buffers: HashMap::default(),
        };

        let worktree_store = project.read(cx).worktree_store();
        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        for worktree in worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>()
        {
            for entry in worktree.files(false, 0) {
                this.update_file(
                    entry.id,
                    ProjectPath {
                        worktree_id: worktree.id(),
                        path: entry.path.clone(),
                    },
                    cx,
                );
            }
        }

        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }

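    /// Returns the declaration with the given id, if it is still present in the index.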
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }

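    /// Returns up to `N` declarations matching the given identifier. Declarations from open
    /// buffers are returned first; file declarations whose project entry is already covered by
    /// an included buffer are skipped so on-disk contents don't duplicate buffer contents.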
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: Identifier,
        cx: &App,
    ) -> Vec<Declaration> {
        // make sure to not have a large stack allocation
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(&identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer { buffer, .. } => {
                    if let Ok(Some(entry_id)) = buffer.read_with(cx, |buffer, cx| {
                        project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
                    }) {
                        included_buffer_entry_ids.push(entry_id);
                        result.push(declaration.clone());
                        if result.len() == N {
                            return result;
                        }
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        file_declarations.push(declaration.clone());
                    }
                }
            }
        }

        for declaration in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        result.push(declaration);

                        if result.len() == N {
                            return result;
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }

    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                for (path, entry_id, path_change) in updated_entries_set.iter() {
                    if let PathChange::Removed = path_change {
                        self.files.remove(entry_id);
                    } else {
                        let project_path = ProjectPath {
                            worktree_id: *worktree_id,
                            path: path.clone(),
                        };
                        self.update_file(*entry_id, project_path, cx);
                    }
                }
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                // TODO: Is this needed?
                self.files.remove(project_entry_id);
            }
            _ => {}
        }
    }

    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

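    /// Starts tracking an open buffer: indexes it immediately, re-indexes it whenever it is
    /// edited, and drops its state when the buffer entity is released.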
    fn register_buffer(&mut self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffers
            .insert(buffer.downgrade(), BufferState::default());
        let weak_buf = buffer.downgrade();
        cx.observe_release(buffer, move |this, _buffer, _cx| {
            this.buffers.remove(&weak_buf);
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();
        self.update_buffer(buffer.clone(), cx);
    }

    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited => self.update_buffer(buffer, cx),
            _ => {}
        }
    }

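    /// Re-indexes the declarations of an open buffer. Waits for parsing to become idle, extracts
    /// outline declarations on a background thread, then replaces the buffer's old declarations
    /// in the index with the new ones.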
    fn update_buffer(&mut self, buffer: Entity<Buffer>, cx: &Context<Self>) {
        let mut parse_status = buffer.read(cx).parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;

            anyhow::Ok(
                declarations_in_buffer(&snapshot)
                    .into_iter()
                    .map(|item| {
                        (
                            item.parent_index,
                            BufferDeclaration::from_outline(item, &snapshot),
                        )
                    })
                    .collect::<Vec<_>>(),
            )
        });

        let task = cx.spawn({
            let weak_buffer = buffer.downgrade();
            async move |this, cx| {
                let Ok(declarations) = parse_task.await else {
                    return;
                };

                this.update(cx, |this, _cx| {
                    let buffer_state = this
                        .buffers
                        .entry(weak_buffer.clone())
                        .or_insert_with(Default::default);

                    for old_declaration_id in &buffer_state.declarations {
                        let Some(declaration) = this.declarations.remove(*old_declaration_id)
                        else {
                            debug_panic!("declaration not found");
                            continue;
                        };
                        if let Some(identifier_declarations) =
                            this.identifiers.get_mut(declaration.identifier())
                        {
                            identifier_declarations.remove(old_declaration_id);
                        }
                    }

                    let mut new_ids = Vec::with_capacity(declarations.len());
                    this.declarations.reserve(declarations.len());
                    for (parent_index, mut declaration) in declarations {
                        declaration.parent = parent_index
                            .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                        let identifier = declaration.identifier.clone();
                        let declaration_id = this.declarations.insert(Declaration::Buffer {
                            buffer: weak_buffer.clone(),
                            declaration,
                        });
                        new_ids.push(declaration_id);

                        this.identifiers
                            .entry(identifier)
                            .or_default()
                            .insert(declaration_id);
                    }

                    buffer_state.declarations = new_ids;
                })
                .ok();
            }
        });

        self.buffers
            .entry(buffer.downgrade())
            .or_insert_with(Default::default)
            .task = Some(task);
    }

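    /// Re-indexes the declarations of a file on disk. Loads the file into a temporary local
    /// buffer, waits for parsing, extracts outline declarations on a background thread, then
    /// replaces the entry's old declarations in the index with the new ones.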
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) {
        let Some(project) = self.project.upgrade() else {
            return;
        };
        let project = project.read(cx);
        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return;
        };
        let language_registry = project.languages().clone();

        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language_registry
                    .language_for_file_path(&project_path.path)
                    .await
                    .log_err();

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(language, cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            })
        });

        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| {
                    (
                        item.parent_index,
                        FileDeclaration::from_outline(item, &snapshot),
                    )
                })
                .collect::<Vec<_>>();
            anyhow::Ok(declarations)
        });

        let task = cx.spawn({
            async move |this, cx| {
                // TODO: how to handle errors?
                let Ok(declarations) = parse_task.await else {
                    return;
                };
                this.update(cx, |this, _cx| {
                    let file_state = this.files.entry(entry_id).or_insert_with(Default::default);

                    for old_declaration_id in &file_state.declarations {
                        let Some(declaration) = this.declarations.remove(*old_declaration_id)
                        else {
                            debug_panic!("declaration not found");
                            continue;
                        };
                        if let Some(identifier_declarations) =
                            this.identifiers.get_mut(declaration.identifier())
                        {
                            identifier_declarations.remove(old_declaration_id);
                        }
                    }

                    let mut new_ids = Vec::with_capacity(declarations.len());
                    this.declarations.reserve(declarations.len());

                    for (parent_index, mut declaration) in declarations {
                        declaration.parent = parent_index
                            .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                        let identifier = declaration.identifier.clone();
                        let declaration_id = this.declarations.insert(Declaration::File {
                            project_entry_id: entry_id,
                            declaration,
                        });
                        new_ids.push(declaration_id);

                        this.identifiers
                            .entry(identifier)
                            .or_default()
                            .insert(declaration_id);
                    }

                    file_state.declarations = new_ids;
                })
                .ok();
            }
        });

        self.files
            .entry(entry_id)
            .or_insert_with(Default::default)
            .task = Some(task);
    }
}

impl BufferDeclaration {
    pub fn from_outline(declaration: OutlineDeclaration, snapshot: &BufferSnapshot) -> Self {
        // use of anchor_before is a guess that the proper behavior is to expand to include
        // insertions immediately before the declaration, but not for insertions immediately after
        Self {
            parent: None,
            identifier: declaration.identifier,
            item_range: snapshot.anchor_before(declaration.item_range.start)
                ..snapshot.anchor_before(declaration.item_range.end),
            signature_range: snapshot.anchor_before(declaration.signature_range.start)
                ..snapshot.anchor_before(declaration.signature_range.end),
        }
    }
}

impl FileDeclaration {
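    /// Converts an outline declaration into a `FileDeclaration`, materializing the signature
    /// text from the snapshot since the buffer used for parsing is not retained.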
    pub fn from_outline(
        declaration: OutlineDeclaration,
        snapshot: &BufferSnapshot,
    ) -> FileDeclaration {
        FileDeclaration {
            parent: None,
            identifier: declaration.identifier,
            item_range: declaration.item_range,
            signature_text: snapshot
                .text_for_range(declaration.signature_range.clone())
                .collect::<String>()
                .into(),
            signature_range: declaration.signature_range,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::{path::Path, sync::Arc};

    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project, ProjectItem};
    use serde_json::json;
    use settings::SettingsStore;
    use text::OffsetRangeExt as _;
    use util::path;

    use crate::tree_sitter_index::TreeSitterIndex;

    #[gpui::test]
    async fn test_unopened_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(main.clone(), cx);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range, 32..279);

            let decl = expect_file_decl("a.rs", &decls[1], &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range, 0..97);
        });
    }

    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(test_process_data.clone(), cx);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(test_process_data.clone(), cx);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0], cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    #[gpui::test]
    async fn test_declarations_limit(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<1>(
                Identifier {
                    name: "main".into(),
                    language_id: rust_lang_id,
                },
                cx,
            );
            assert_eq!(decls.len(), 1);
        });
    }

    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(main.clone(), cx);
            assert_eq!(decls.len(), 2);
            let decl = expect_buffer_decl("c.rs", &decls[0], cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..279);

            expect_file_decl("a.rs", &decls[1], &project, cx);
        });

        // Need to trigger flush_effects so that the observe_release handler will run.
        cx.update(|_cx| {
            drop(buffer);
        });
        cx.run_until_parked();

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(main, cx);
            assert_eq!(decls.len(), 2);
            expect_file_decl("c.rs", &decls[0], &project, cx);
            expect_file_decl("a.rs", &decls[1], &project, cx);
        });
    }

    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            buffer,
        } = declaration
        {
            assert_eq!(
                buffer
                    .upgrade()
                    .unwrap()
                    .read(cx)
                    .project_path(cx)
                    .unwrap()
                    .path
                    .as_ref(),
                Path::new(path),
            );
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                Path::new(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<TreeSitterIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let index = cx.new(|cx| TreeSitterIndex::new(&project, cx));
        cx.run_until_parked();

        (project, index, lang_id)
    }

    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}