1use collections::{HashMap, HashSet};
2use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity};
3use language::{Buffer, BufferEvent, BufferSnapshot};
4use project::buffer_store::{BufferStore, BufferStoreEvent};
5use project::worktree_store::{WorktreeStore, WorktreeStoreEvent};
6use project::{PathChange, Project, ProjectEntryId, ProjectPath};
7use slotmap::SlotMap;
8use std::borrow::Cow;
9use std::ops::{Deref, Range};
10use std::sync::Arc;
11use text::{Anchor, Bias, OffsetRangeExt, ToOffset};
12use util::{ResultExt as _, debug_panic, some_or_debug_panic};
13
14use crate::outline::{Identifier, OutlineDeclaration, declarations_in_buffer};
15
16// TODO:
17//
18// * Skip for remote projects
19
20// Potential future improvements:
21//
22// * Send multiple selected excerpt ranges. Challenge is that excerpt ranges influence which
23// references are present and their scores.
24
25// Potential future optimizations:
26//
27// * Cache of buffers for files
28//
29// * Parse files directly instead of loading into a Rope. Make SyntaxMap generic to handle embedded
30// languages? Will also need to find line boundaries, but that can be done by scanning characters in
31// the flat representation.
32//
33// * Use something similar to slotmap without key versions.
34//
35// * Concurrent slotmap
36//
37// * Use queue for parsing
38
// Maximum number of bytes of declaration text captured before truncation; see
// `expand_range_to_line_boundaries_and_truncate`.
const ITEM_TEXT_TRUNCATION_LENGTH: usize = 1024;

slotmap::new_key_type! {
    /// Stable key for a `Declaration` stored in `TreeSitterIndex::declarations`.
    pub struct DeclarationId;
}
44
/// Index of declarations extracted with tree-sitter from a project's files
/// and open buffers, kept current by subscribing to worktree and buffer-store
/// events.
pub struct TreeSitterIndex {
    /// All live declarations, owned here and referenced by id everywhere else.
    declarations: SlotMap<DeclarationId, Declaration>,
    /// Reverse lookup: identifier -> ids of declarations with that identifier.
    identifiers: HashMap<Identifier, HashSet<DeclarationId>>,
    /// State for declarations parsed from on-disk files, keyed by project entry.
    files: HashMap<ProjectEntryId, FileState>,
    /// State for declarations parsed from open buffers.
    buffers: HashMap<WeakEntity<Buffer>, BufferState>,
    project: WeakEntity<Project>,
}
52
/// Declarations most recently parsed from an on-disk file.
#[derive(Debug, Default)]
struct FileState {
    declarations: Vec<DeclarationId>,
    // In-flight parse/update task; replaced on each new update for the file.
    task: Option<Task<()>>,
}
58
/// Declarations most recently parsed from an open buffer.
#[derive(Default)]
struct BufferState {
    declarations: Vec<DeclarationId>,
    // In-flight parse/update task; replaced on each new update for the buffer.
    task: Option<Task<()>>,
}
64
/// A declaration found in the project, backed either by an on-disk file (text
/// captured eagerly) or by an open buffer (anchor ranges resolved on demand).
#[derive(Debug, Clone)]
pub enum Declaration {
    File {
        project_entry_id: ProjectEntryId,
        declaration: FileDeclaration,
    },
    Buffer {
        buffer: WeakEntity<Buffer>,
        declaration: BufferDeclaration,
    },
}
76
impl Declaration {
    /// Returns the identifier this declaration defines.
    fn identifier(&self) -> &Identifier {
        match self {
            Declaration::File { declaration, .. } => &declaration.identifier,
            Declaration::Buffer { declaration, .. } => &declaration.identifier,
        }
    }

    /// Returns the project entry this declaration belongs to. For
    /// buffer-backed declarations this is `None` when the buffer has been
    /// dropped or its file has no project entry.
    pub fn project_entry_id(&self, cx: &App) -> Option<ProjectEntryId> {
        match self {
            Declaration::File {
                project_entry_id, ..
            } => Some(*project_entry_id),
            Declaration::Buffer { buffer, .. } => buffer
                .read_with(cx, |buffer, _cx| {
                    project::File::from_dyn(buffer.file())
                        .and_then(|file| file.project_entry_id(cx))
                })
                .ok()
                .flatten(),
        }
    }

    /// Returns `(text, is_truncated)` for the whole declaration item. File
    /// declarations return the text captured at index time; buffer
    /// declarations re-resolve their anchor range against the current buffer
    /// (empty if the buffer has been dropped).
    pub fn item_text(&self, cx: &App) -> (Cow<'_, str>, bool) {
        match self {
            Declaration::File { declaration, .. } => (
                declaration.text.as_ref().into(),
                declaration.text_is_truncated,
            ),
            Declaration::Buffer {
                buffer,
                declaration,
            } => buffer
                .read_with(cx, |buffer, _cx| {
                    let (range, is_truncated) = expand_range_to_line_boundaries_and_truncate(
                        &declaration.item_range,
                        ITEM_TEXT_TRUNCATION_LENGTH,
                        buffer.deref(),
                    );
                    (
                        buffer.text_for_range(range).collect::<Cow<str>>(),
                        is_truncated,
                    )
                })
                .unwrap_or_default(),
        }
    }

    /// Returns `(text, is_truncated)` for just the declaration's signature,
    /// analogous to `item_text`.
    pub fn signature_text(&self, cx: &App) -> (Cow<'_, str>, bool) {
        match self {
            Declaration::File { declaration, .. } => (
                declaration.text[declaration.signature_range_in_text.clone()].into(),
                declaration.signature_is_truncated,
            ),
            Declaration::Buffer {
                buffer,
                declaration,
            } => buffer
                .read_with(cx, |buffer, _cx| {
                    let (range, is_truncated) = expand_range_to_line_boundaries_and_truncate(
                        &declaration.signature_range,
                        ITEM_TEXT_TRUNCATION_LENGTH,
                        buffer.deref(),
                    );
                    (
                        buffer.text_for_range(range).collect::<Cow<str>>(),
                        is_truncated,
                    )
                })
                .unwrap_or_default(),
        }
    }
}
150
151fn expand_range_to_line_boundaries_and_truncate<T: ToOffset>(
152 range: &Range<T>,
153 limit: usize,
154 buffer: &text::BufferSnapshot,
155) -> (Range<usize>, bool) {
156 let mut point_range = range.to_point(buffer);
157 point_range.start.column = 0;
158 point_range.end.row += 1;
159 point_range.end.column = 0;
160
161 let mut item_range = point_range.to_offset(buffer);
162 let is_truncated = item_range.len() > limit;
163 if is_truncated {
164 item_range.end = item_range.start + limit;
165 }
166 item_range.end = buffer.clip_offset(item_range.end, Bias::Left);
167 (item_range, is_truncated)
168}
169
/// A declaration captured from an on-disk file. All text needed later is
/// copied out at index time, so no file access is required afterwards.
#[derive(Debug, Clone)]
pub struct FileDeclaration {
    /// Id of the enclosing (parent) declaration in the index, if any.
    pub parent: Option<DeclarationId>,
    pub identifier: Identifier,
    /// offset range of the declaration in the file, expanded to line boundaries and truncated
    pub item_range_in_file: Range<usize>,
    /// text of `item_range_in_file`
    pub text: Arc<str>,
    /// whether `text` was truncated
    pub text_is_truncated: bool,
    /// offset range of the signature within `text`
    pub signature_range_in_text: Range<usize>,
    /// whether the signature was truncated
    pub signature_is_truncated: bool,
}
185
/// A declaration in an open buffer. Anchor ranges track the declaration
/// across subsequent edits; text is resolved on demand.
#[derive(Debug, Clone)]
pub struct BufferDeclaration {
    /// Id of the enclosing (parent) declaration in the index, if any.
    pub parent: Option<DeclarationId>,
    pub identifier: Identifier,
    pub item_range: Range<Anchor>,
    pub signature_range: Range<Anchor>,
}
193
impl TreeSitterIndex {
    /// Creates an index for `project`: scans every file in all current
    /// worktrees, registers all currently open buffers, and subscribes to
    /// worktree/buffer-store events to keep the index up to date.
    pub fn new(project: &Entity<Project>, cx: &mut Context<Self>) -> Self {
        let mut this = Self {
            declarations: SlotMap::with_key(),
            identifiers: HashMap::default(),
            project: project.downgrade(),
            files: HashMap::default(),
            buffers: HashMap::default(),
        };

        let worktree_store = project.read(cx).worktree_store();
        cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
            .detach();

        // Collect snapshots first so the store isn't borrowed while
        // `update_file` uses `cx` mutably.
        for worktree in worktree_store
            .read(cx)
            .worktrees()
            .map(|w| w.read(cx).snapshot())
            .collect::<Vec<_>>()
        {
            for entry in worktree.files(false, 0) {
                this.update_file(
                    entry.id,
                    ProjectPath {
                        worktree_id: worktree.id(),
                        path: entry.path.clone(),
                    },
                    cx,
                );
            }
        }

        let buffer_store = project.read(cx).buffer_store().clone();
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            this.register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();

        this
    }

    /// Looks up a declaration by id.
    pub fn declaration(&self, id: DeclarationId) -> Option<&Declaration> {
        self.declarations.get(id)
    }

    /// Returns up to `N` declarations for `identifier`. Buffer-backed
    /// declarations are preferred: a file-backed declaration is dropped when a
    /// buffer-backed declaration exists for the same project entry (the open
    /// buffer shadows the on-disk file).
    pub fn declarations_for_identifier<const N: usize>(
        &self,
        identifier: &Identifier,
        cx: &App,
    ) -> Vec<Declaration> {
        // make sure to not have a large stack allocation
        assert!(N < 32);

        let Some(declaration_ids) = self.identifiers.get(&identifier) else {
            return vec![];
        };

        let mut result = Vec::with_capacity(N);
        let mut included_buffer_entry_ids = arrayvec::ArrayVec::<_, N>::new();
        let mut file_declarations = Vec::new();

        for declaration_id in declaration_ids {
            let declaration = self.declarations.get(*declaration_id);
            let Some(declaration) = some_or_debug_panic(declaration) else {
                continue;
            };
            match declaration {
                Declaration::Buffer { buffer, .. } => {
                    if let Ok(Some(entry_id)) = buffer.read_with(cx, |buffer, cx| {
                        project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
                    }) {
                        included_buffer_entry_ids.push(entry_id);
                        result.push(declaration.clone());
                        if result.len() == N {
                            return result;
                        }
                    }
                }
                Declaration::File {
                    project_entry_id, ..
                } => {
                    // Defer file declarations: a buffer declaration for the
                    // same entry may still appear later in this iteration.
                    if !included_buffer_entry_ids.contains(project_entry_id) {
                        file_declarations.push(declaration.clone());
                    }
                }
            }
        }

        // Second pass: re-check against the now-complete set of shadowing
        // buffer entry ids before admitting file declarations.
        for declaration in file_declarations {
            match declaration {
                Declaration::File {
                    project_entry_id, ..
                } => {
                    if !included_buffer_entry_ids.contains(&project_entry_id) {
                        result.push(declaration);

                        if result.len() == N {
                            return result;
                        }
                    }
                }
                Declaration::Buffer { .. } => {}
            }
        }

        result
    }

    /// Returns how many declarations the index currently holds for the file
    /// or buffer that `declaration` came from.
    pub fn file_declaration_count(&self, declaration: &Declaration) -> usize {
        match declaration {
            Declaration::File {
                project_entry_id, ..
            } => self
                .files
                .get(project_entry_id)
                .map(|file_state| file_state.declarations.len())
                .unwrap_or_default(),
            Declaration::Buffer { buffer, .. } => self
                .buffers
                .get(buffer)
                .map(|buffer_state| buffer_state.declarations.len())
                .unwrap_or_default(),
        }
    }

    // Re-indexes files whose entries changed and drops state for removed
    // entries.
    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeUpdatedEntries(worktree_id, updated_entries_set) => {
                for (path, entry_id, path_change) in updated_entries_set.iter() {
                    if let PathChange::Removed = path_change {
                        self.files.remove(entry_id);
                    } else {
                        let project_path = ProjectPath {
                            worktree_id: *worktree_id,
                            path: path.clone(),
                        };
                        self.update_file(*entry_id, project_path, cx);
                    }
                }
            }
            WorktreeDeletedEntry(_worktree_id, project_entry_id) => {
                // TODO: Is this needed?
                self.files.remove(project_entry_id);
            }
            _ => {}
        }
    }

    // Starts tracking newly added buffers; all other buffer-store events are
    // intentionally ignored.
    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => self.register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

    // Tracks `buffer`: installs release cleanup and edit subscription, then
    // kicks off an initial parse.
    fn register_buffer(&mut self, buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        self.buffers
            .insert(buffer.downgrade(), BufferState::default());
        let weak_buf = buffer.downgrade();
        cx.observe_release(buffer, move |this, _buffer, _cx| {
            this.buffers.remove(&weak_buf);
        })
        .detach();
        cx.subscribe(buffer, Self::handle_buffer_event).detach();
        self.update_buffer(buffer.clone(), cx);
    }

    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Edited => self.update_buffer(buffer, cx),
            _ => {}
        }
    }

    // Re-parses `buffer` asynchronously and swaps its declarations into the
    // index, replacing (and thereby dropping) any previously stored task.
    fn update_buffer(&mut self, buffer: Entity<Buffer>, cx: &Context<Self>) {
        // Wait until tree-sitter parsing is idle before snapshotting, so the
        // outline reflects the latest edits.
        let mut parse_status = buffer.read(cx).parse_status();
        let snapshot_task = cx.spawn({
            let weak_buffer = buffer.downgrade();
            async move |_, cx| {
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }
                weak_buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            }
        });

        // Extract declarations on the background executor; keep each item's
        // parent index so parent ids can be wired up after insertion.
        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;

            anyhow::Ok(
                declarations_in_buffer(&snapshot)
                    .into_iter()
                    .map(|item| {
                        (
                            item.parent_index,
                            BufferDeclaration::from_outline(item, &snapshot),
                        )
                    })
                    .collect::<Vec<_>>(),
            )
        });

        let task = cx.spawn({
            let weak_buffer = buffer.downgrade();
            async move |this, cx| {
                let Ok(declarations) = parse_task.await else {
                    return;
                };

                this.update(cx, |this, _cx| {
                    let buffer_state = this
                        .buffers
                        .entry(weak_buffer.clone())
                        .or_insert_with(Default::default);

                    // Remove this buffer's previous declarations from both the
                    // slotmap and the identifier reverse index.
                    for old_declaration_id in &buffer_state.declarations {
                        let Some(declaration) = this.declarations.remove(*old_declaration_id)
                        else {
                            debug_panic!("declaration not found");
                            continue;
                        };
                        if let Some(identifier_declarations) =
                            this.identifiers.get_mut(declaration.identifier())
                        {
                            identifier_declarations.remove(old_declaration_id);
                        }
                    }

                    let mut new_ids = Vec::with_capacity(declarations.len());
                    this.declarations.reserve(declarations.len());
                    for (parent_index, mut declaration) in declarations {
                        // Parents precede children in the outline, so the
                        // parent's new id is already in `new_ids`.
                        declaration.parent = parent_index
                            .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                        let identifier = declaration.identifier.clone();
                        let declaration_id = this.declarations.insert(Declaration::Buffer {
                            buffer: weak_buffer.clone(),
                            declaration,
                        });
                        new_ids.push(declaration_id);

                        this.identifiers
                            .entry(identifier)
                            .or_default()
                            .insert(declaration_id);
                    }

                    buffer_state.declarations = new_ids;
                })
                .ok();
            }
        });

        self.buffers
            .entry(buffer.downgrade())
            .or_insert_with(Default::default)
            .task = Some(task);
    }

    // Loads the file at `project_path` into a temporary buffer, parses it, and
    // swaps the resulting declarations into the index for `entry_id`.
    fn update_file(
        &mut self,
        entry_id: ProjectEntryId,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) {
        let Some(project) = self.project.upgrade() else {
            return;
        };
        let project = project.read(cx);
        let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx) else {
            return;
        };
        let language_registry = project.languages().clone();

        // Load the file text into a throwaway local buffer with the language
        // inferred from the path, and wait for parsing to finish.
        let snapshot_task = worktree.update(cx, |worktree, cx| {
            let load_task = worktree.load_file(&project_path.path, cx);
            cx.spawn(async move |_this, cx| {
                let loaded_file = load_task.await?;
                let language = language_registry
                    .language_for_file_path(&project_path.path)
                    .await
                    .log_err();

                let buffer = cx.new(|cx| {
                    let mut buffer = Buffer::local(loaded_file.text, cx);
                    buffer.set_language(language, cx);
                    buffer
                })?;

                let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
                while *parse_status.borrow() != language::ParseStatus::Idle {
                    parse_status.changed().await?;
                }

                buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
            })
        });

        let parse_task = cx.background_spawn(async move {
            let snapshot = snapshot_task.await?;
            let declarations = declarations_in_buffer(&snapshot)
                .into_iter()
                .map(|item| {
                    (
                        item.parent_index,
                        FileDeclaration::from_outline(item, &snapshot),
                    )
                })
                .collect::<Vec<_>>();
            anyhow::Ok(declarations)
        });

        let task = cx.spawn({
            async move |this, cx| {
                // TODO: how to handle errors?
                let Ok(declarations) = parse_task.await else {
                    return;
                };
                this.update(cx, |this, _cx| {
                    let file_state = this.files.entry(entry_id).or_insert_with(Default::default);

                    // Remove this file's previous declarations from both the
                    // slotmap and the identifier reverse index.
                    for old_declaration_id in &file_state.declarations {
                        let Some(declaration) = this.declarations.remove(*old_declaration_id)
                        else {
                            debug_panic!("declaration not found");
                            continue;
                        };
                        if let Some(identifier_declarations) =
                            this.identifiers.get_mut(declaration.identifier())
                        {
                            identifier_declarations.remove(old_declaration_id);
                        }
                    }

                    let mut new_ids = Vec::with_capacity(declarations.len());
                    this.declarations.reserve(declarations.len());

                    for (parent_index, mut declaration) in declarations {
                        // Parents precede children in the outline, so the
                        // parent's new id is already in `new_ids`.
                        declaration.parent = parent_index
                            .and_then(|ix| some_or_debug_panic(new_ids.get(ix).copied()));

                        let identifier = declaration.identifier.clone();
                        let declaration_id = this.declarations.insert(Declaration::File {
                            project_entry_id: entry_id,
                            declaration,
                        });
                        new_ids.push(declaration_id);

                        this.identifiers
                            .entry(identifier)
                            .or_default()
                            .insert(declaration_id);
                    }

                    file_state.declarations = new_ids;
                })
                .ok();
            }
        });

        self.files
            .entry(entry_id)
            .or_insert_with(Default::default)
            .task = Some(task);
    }
}
581
582impl BufferDeclaration {
583 pub fn from_outline(declaration: OutlineDeclaration, snapshot: &BufferSnapshot) -> Self {
584 // use of anchor_before is a guess that the proper behavior is to expand to include
585 // insertions immediately before the declaration, but not for insertions immediately after
586 Self {
587 parent: None,
588 identifier: declaration.identifier,
589 item_range: snapshot.anchor_before(declaration.item_range.start)
590 ..snapshot.anchor_before(declaration.item_range.end),
591 signature_range: snapshot.anchor_before(declaration.signature_range.start)
592 ..snapshot.anchor_before(declaration.signature_range.end),
593 }
594 }
595}
596
597impl FileDeclaration {
598 pub fn from_outline(
599 declaration: OutlineDeclaration,
600 snapshot: &BufferSnapshot,
601 ) -> FileDeclaration {
602 let (item_range_in_file, text_is_truncated) = expand_range_to_line_boundaries_and_truncate(
603 &declaration.item_range,
604 ITEM_TEXT_TRUNCATION_LENGTH,
605 snapshot,
606 );
607
608 // TODO: consider logging if unexpected
609 let signature_start = declaration
610 .signature_range
611 .start
612 .saturating_sub(item_range_in_file.start);
613 let mut signature_end = declaration
614 .signature_range
615 .end
616 .saturating_sub(item_range_in_file.start);
617 let signature_is_truncated = signature_end > item_range_in_file.len();
618 if signature_is_truncated {
619 signature_end = item_range_in_file.len();
620 }
621
622 FileDeclaration {
623 parent: None,
624 identifier: declaration.identifier,
625 signature_range_in_text: signature_start..signature_end,
626 signature_is_truncated,
627 text: snapshot
628 .text_for_range(item_range_in_file.clone())
629 .collect::<String>()
630 .into(),
631 text_is_truncated,
632 item_range_in_file,
633 }
634 }
635}
636
#[cfg(test)]
mod tests {
    use super::*;
    use std::{path::Path, sync::Arc};

    use futures::channel::oneshot;
    use gpui::TestAppContext;
    use indoc::indoc;
    use language::{Language, LanguageConfig, LanguageId, LanguageMatcher, tree_sitter_rust};
    use project::{FakeFs, Project, ProjectItem};
    use serde_json::json;
    use settings::SettingsStore;
    use util::path;

    use crate::tree_sitter_index::TreeSitterIndex;

    // Files that are never opened as buffers should be indexed as
    // file-backed declarations.
    #[gpui::test]
    async fn test_unopen_indexed_files(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(&main, cx);
            assert_eq!(decls.len(), 2);

            let decl = expect_file_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, main.clone());
            assert_eq!(decl.item_range_in_file, 32..280);

            let decl = expect_file_decl("a.rs", &decls[1], &project, cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range_in_file, 0..98);
        });
    }

    // Parent links should connect a nested declaration to its enclosing
    // declaration for file-backed entries.
    #[gpui::test]
    async fn test_parents_in_file(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(&test_process_data, cx);
            assert_eq!(decls.len(), 1);

            let decl = expect_file_decl("c.rs", &decls[0], &project, cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index.declaration(parent_id).unwrap();
            let parent_decl = expect_file_decl("c.rs", &parent, &project, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });
    }

    // Same as above, but for buffer-backed declarations once the file is
    // opened.
    #[gpui::test]
    async fn test_parents_in_buffer(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;
        let test_process_data = Identifier {
            name: "test_process_data".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(&test_process_data, cx);
            assert_eq!(decls.len(), 1);

            let decl = expect_buffer_decl("c.rs", &decls[0], cx);
            assert_eq!(decl.identifier, test_process_data);

            let parent_id = decl.parent.unwrap();
            let parent = index.declaration(parent_id).unwrap();
            let parent_decl = expect_buffer_decl("c.rs", &parent, cx);
            assert_eq!(
                parent_decl.identifier,
                Identifier {
                    name: "tests".into(),
                    language_id: rust_lang_id
                }
            );
            assert_eq!(parent_decl.parent, None);
        });

        drop(buffer);
    }

    // NOTE(review): "limt" is a typo for "limit" — consider renaming.
    #[gpui::test]
    async fn test_declarations_limt(cx: &mut TestAppContext) {
        let (_, index, rust_lang_id) = init_test(cx).await;

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<1>(
                &Identifier {
                    name: "main".into(),
                    language_id: rust_lang_id,
                },
                cx,
            );
            assert_eq!(decls.len(), 1);
        });
    }

    // An open buffer should shadow its on-disk file; once the buffer is
    // released, the file-backed declaration should reappear.
    #[gpui::test]
    async fn test_buffer_shadow(cx: &mut TestAppContext) {
        let (project, index, rust_lang_id) = init_test(cx).await;

        let main = Identifier {
            name: "main".into(),
            language_id: rust_lang_id,
        };

        let buffer = project
            .update(cx, |project, cx| {
                let project_path = project.find_project_path("c.rs", cx).unwrap();
                project.open_buffer(project_path, cx)
            })
            .await
            .unwrap();

        cx.run_until_parked();

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(&main, cx);
            assert_eq!(decls.len(), 2);
            let decl = expect_buffer_decl("c.rs", &decls[0], cx);
            assert_eq!(decl.identifier, main);
            assert_eq!(decl.item_range.to_offset(&buffer.read(cx)), 32..279);

            expect_file_decl("a.rs", &decls[1], &project, cx);
        });

        // Drop the buffer and wait for release
        let (release_tx, release_rx) = oneshot::channel();
        cx.update(|cx| {
            cx.observe_release(&buffer, |_, _| {
                release_tx.send(()).ok();
            })
            .detach();
        });
        drop(buffer);
        cx.run_until_parked();
        release_rx.await.ok();
        cx.run_until_parked();

        index.read_with(cx, |index, cx| {
            let decls = index.declarations_for_identifier::<8>(&main, cx);
            assert_eq!(decls.len(), 2);
            expect_file_decl("c.rs", &decls[0], &project, cx);
            expect_file_decl("a.rs", &decls[1], &project, cx);
        });
    }

    // Asserts `declaration` is buffer-backed and belongs to `path`, returning
    // the inner `BufferDeclaration`.
    fn expect_buffer_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        cx: &App,
    ) -> &'a BufferDeclaration {
        if let Declaration::Buffer {
            declaration,
            buffer,
        } = declaration
        {
            assert_eq!(
                buffer
                    .upgrade()
                    .unwrap()
                    .read(cx)
                    .project_path(cx)
                    .unwrap()
                    .path
                    .as_ref(),
                Path::new(path),
            );
            declaration
        } else {
            panic!("Expected a buffer declaration, found {:?}", declaration);
        }
    }

    // Asserts `declaration` is file-backed and belongs to `path`, returning
    // the inner `FileDeclaration`.
    fn expect_file_decl<'a>(
        path: &str,
        declaration: &'a Declaration,
        project: &Entity<Project>,
        cx: &App,
    ) -> &'a FileDeclaration {
        if let Declaration::File {
            declaration,
            project_entry_id: file,
        } = declaration
        {
            assert_eq!(
                project
                    .read(cx)
                    .path_for_entry(*file, cx)
                    .unwrap()
                    .path
                    .as_ref(),
                Path::new(path),
            );
            declaration
        } else {
            panic!("Expected a file declaration, found {:?}", declaration);
        }
    }

    // Builds a project over a fake fs with three Rust files and returns the
    // project, a fully-populated index, and the Rust language id.
    async fn init_test(
        cx: &mut TestAppContext,
    ) -> (Entity<Project>, Entity<TreeSitterIndex>, LanguageId) {
        cx.update(|cx| {
            let settings_store = SettingsStore::test(cx);
            cx.set_global(settings_store);
            language::init(cx);
            Project::init_settings(cx);
        });

        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/root"),
            json!({
                "a.rs": indoc! {r#"
                    fn main() {
                        let x = 1;
                        let y = 2;
                        let z = add(x, y);
                        println!("Result: {}", z);
                    }

                    fn add(a: i32, b: i32) -> i32 {
                        a + b
                    }
                "#},
                "b.rs": indoc! {"
                    pub struct Config {
                        pub name: String,
                        pub value: i32,
                    }

                    impl Config {
                        pub fn new(name: String, value: i32) -> Self {
                            Config { name, value }
                        }
                    }
                "},
                "c.rs": indoc! {r#"
                    use std::collections::HashMap;

                    fn main() {
                        let args: Vec<String> = std::env::args().collect();
                        let data: Vec<i32> = args[1..]
                            .iter()
                            .filter_map(|s| s.parse().ok())
                            .collect();
                        let result = process_data(data);
                        println!("{:?}", result);
                    }

                    fn process_data(data: Vec<i32>) -> HashMap<i32, usize> {
                        let mut counts = HashMap::new();
                        for value in data {
                            *counts.entry(value).or_insert(0) += 1;
                        }
                        counts
                    }

                    #[cfg(test)]
                    mod tests {
                        use super::*;

                        #[test]
                        fn test_process_data() {
                            let data = vec![1, 2, 2, 3];
                            let result = process_data(data);
                            assert_eq!(result.get(&2), Some(&2));
                        }
                    }
                "#}
            }),
        )
        .await;
        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        let lang = rust_lang();
        let lang_id = lang.id();
        language_registry.add(Arc::new(lang));

        let index = cx.new(|cx| TreeSitterIndex::new(&project, cx));
        cx.run_until_parked();

        (project, index, lang_id)
    }

    // Minimal Rust language definition with the outline query the index
    // relies on.
    fn rust_lang() -> Language {
        Language::new(
            LanguageConfig {
                name: "Rust".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["rs".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            Some(tree_sitter_rust::LANGUAGE.into()),
        )
        .with_outline_query(include_str!("../../languages/src/rust/outline.scm"))
        .unwrap()
    }
}