use crate::{Grammar, InjectionConfig, Language, LanguageRegistry};
use lazy_static::lazy_static;
use parking_lot::Mutex;
use std::{
    borrow::Cow,
    cell::RefCell,
    cmp::{Ordering, Reverse},
    collections::BinaryHeap,
    ops::{Deref, DerefMut, Range},
    sync::Arc,
};
use sum_tree::{Bias, SeekTarget, SumTree};
use text::{rope, Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
use tree_sitter::{
    Node, Parser, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree,
};

thread_local! {
    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}

lazy_static! {
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}
25
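/// Keeps track of the syntax trees for a buffer, including trees for
/// languages that are injected into the root language (for example, Rust
/// code nested inside a `macro_invocation`). `parsed_version` is the buffer
/// version that was last fully parsed, and `interpolated_version` is the
/// version whose edits have been applied to the existing trees without
/// reparsing.
///
/// A minimal usage sketch, mirroring this file's test module (the
/// `rust_lang()` helper and the `buffer` value come from those tests, and
/// `reparse` on `SyntaxMap` is a test-only convenience):
///
/// ```ignore
/// let registry = Arc::new(LanguageRegistry::test());
/// let language = Arc::new(rust_lang());
/// registry.add(language.clone());
///
/// let mut syntax_map = SyntaxMap::new();
/// syntax_map.set_language_registry(registry);
/// syntax_map.reparse(language.clone(), &buffer);
///
/// // After editing the buffer:
/// syntax_map.interpolate(&buffer);
/// syntax_map.reparse(language, &buffer);
/// ```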
26#[derive(Default)]
27pub struct SyntaxMap {
28 parsed_version: clock::Global,
29 interpolated_version: clock::Global,
30 snapshot: SyntaxSnapshot,
31 language_registry: Option<Arc<LanguageRegistry>>,
32}
33
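/// An immutable snapshot of the parsed layers. Layers are kept in a
/// `SumTree`, ordered by depth and then by their range in the buffer, so they
/// can be sliced and reused efficiently when the buffer is edited or queried.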
34#[derive(Clone, Default)]
35pub struct SyntaxSnapshot {
36 layers: SumTree<SyntaxLayer>,
37}
38
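/// An iterator over query captures drawn from multiple syntax layers, merged
/// so that captures come out in buffer order. Layers whose captures are
/// exhausted are moved past `active_layer_count` and ignored until the byte
/// range is changed.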
39#[derive(Default)]
40pub struct SyntaxMapCaptures<'a> {
41 layers: Vec<SyntaxMapCapturesLayer<'a>>,
42 active_layer_count: usize,
43 grammars: Vec<&'a Grammar>,
44}
45
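/// Like `SyntaxMapCaptures`, but yields whole query matches (a pattern index
/// plus its captures) instead of individual captures.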
46#[derive(Default)]
47pub struct SyntaxMapMatches<'a> {
48 layers: Vec<SyntaxMapMatchesLayer<'a>>,
49 active_layer_count: usize,
50 grammars: Vec<&'a Grammar>,
51}
52
53#[derive(Debug)]
54pub struct SyntaxMapCapture<'a> {
55 pub depth: usize,
56 pub node: Node<'a>,
57 pub index: u32,
58 pub grammar_index: usize,
59}
60
61#[derive(Debug)]
62pub struct SyntaxMapMatch<'a> {
63 pub depth: usize,
64 pub pattern_index: usize,
65 pub captures: &'a [QueryCapture<'a>],
66 pub grammar_index: usize,
67}
68
69struct SyntaxMapCapturesLayer<'a> {
70 depth: usize,
71 captures: QueryCaptures<'a, 'a, TextProvider<'a>>,
72 next_capture: Option<QueryCapture<'a>>,
73 grammar_index: usize,
74 _query_cursor: QueryCursorHandle,
75}
76
77struct SyntaxMapMatchesLayer<'a> {
78 depth: usize,
79 next_pattern_index: usize,
80 next_captures: Vec<QueryCapture<'a>>,
81 has_next: bool,
82 matches: QueryMatches<'a, 'a, TextProvider<'a>>,
83 grammar_index: usize,
84 _query_cursor: QueryCursorHandle,
85}
86
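/// A single parsed layer: one syntax tree covering a range of the buffer at a
/// given injection depth. Depth 0 is the buffer's root language; each level of
/// injection adds one. The tree's coordinates are relative to the start of
/// `range`, not to the start of the buffer.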
87#[derive(Clone)]
88struct SyntaxLayer {
89 depth: usize,
90 range: Range<Anchor>,
91 tree: tree_sitter::Tree,
92 language: Arc<Language>,
93}
94
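/// The `SumTree` summary for a run of layers: the depths it spans, the
/// combined range of its deepest layers, and the range of its last layer,
/// which is what the seek targets below compare against.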
95#[derive(Debug, Clone)]
96struct SyntaxLayerSummary {
97 min_depth: usize,
98 max_depth: usize,
99 range: Range<Anchor>,
100 last_layer_range: Range<Anchor>,
101}
102
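/// Seek targets for the layer `SumTree`. Layers are ordered by depth and then
/// by range, so a cursor can seek to the layers at a given depth covering a
/// given range (`DepthAndRange`), to the first layer at a given depth ending
/// at or after a position (`DepthAndMaxPosition`), or toward a depth-and-range
/// target while stopping early at a bounded position
/// (`DepthAndRangeOrMaxPosition`).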
103#[derive(Clone, Debug)]
104struct DepthAndRange(usize, Range<Anchor>);
105
106#[derive(Clone, Debug)]
107struct DepthAndMaxPosition(usize, Anchor);
108
109#[derive(Clone, Debug)]
110struct DepthAndRangeOrMaxPosition(DepthAndRange, DepthAndMaxPosition);
111
112struct ReparseStep {
113 depth: usize,
114 language: Arc<Language>,
115 ranges: Vec<tree_sitter::Range>,
116 range: Range<Anchor>,
117}
118
119#[derive(Debug, PartialEq, Eq)]
120struct ChangedRegion {
121 depth: usize,
122 range: Range<Anchor>,
123}
124
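/// The set of regions that have been invalidated at each depth during a
/// reparse, kept sorted so the reparse loop can cheaply find the next position
/// past which existing layers can no longer be reused.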
125#[derive(Default)]
126struct ChangeRegionSet(Vec<ChangedRegion>);
127
128struct TextProvider<'a>(&'a Rope);
129
130struct ByteChunks<'a>(rope::Chunks<'a>);
131
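/// A pooled `tree_sitter::QueryCursor`. Cursors are checked out of the global
/// `QUERY_CURSORS` pool and returned to it, with their ranges reset, when the
/// handle is dropped.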
132struct QueryCursorHandle(Option<QueryCursor>);
133
134impl SyntaxMap {
135 pub fn new() -> Self {
136 Self::default()
137 }
138
139 pub fn set_language_registry(&mut self, registry: Arc<LanguageRegistry>) {
140 self.language_registry = Some(registry);
141 }
142
143 pub fn snapshot(&self) -> SyntaxSnapshot {
144 self.snapshot.clone()
145 }
146
147 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
148 self.language_registry.clone()
149 }
150
151 pub fn parsed_version(&self) -> clock::Global {
152 self.parsed_version.clone()
153 }
154
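    /// Shift the existing syntax trees to account for edits made since the
    /// last call, without reparsing, so that tree positions stay roughly in
    /// sync with the buffer until a real reparse runs.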
155 pub fn interpolate(&mut self, text: &BufferSnapshot) {
156 self.snapshot.interpolate(&self.interpolated_version, text);
157 self.interpolated_version = text.version.clone();
158 }
159
160 #[cfg(test)]
161 pub fn reparse(&mut self, language: Arc<Language>, text: &BufferSnapshot) {
162 if !self.interpolated_version.observed_all(&text.version) {
163 self.interpolate(text);
164 }
165
166 self.snapshot.reparse(
167 &self.parsed_version,
168 text,
169 self.language_registry.clone(),
170 language,
171 );
172 self.parsed_version = text.version.clone();
173 }
174
175 pub fn did_parse(&mut self, snapshot: SyntaxSnapshot, version: clock::Global) {
176 self.interpolated_version = version.clone();
177 self.parsed_version = version;
178 self.snapshot = snapshot;
179 }
180
181 pub fn clear(&mut self) {
182 self.snapshot = SyntaxSnapshot::default();
183 }
184}
185
186impl SyntaxSnapshot {
187 pub fn is_empty(&self) -> bool {
188 self.layers.is_empty()
189 }
190
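    /// Apply the edits made since `from_version` to every layer's tree via
    /// `Tree::edit`, without reparsing. Each edit is translated into a layer's
    /// local coordinates, since a layer's tree is rooted at the start of the
    /// layer's range rather than at the start of the buffer.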
191 pub fn interpolate(&mut self, from_version: &clock::Global, text: &BufferSnapshot) {
192 let edits = text
193 .edits_since::<(usize, Point)>(&from_version)
194 .collect::<Vec<_>>();
195 if edits.is_empty() {
196 return;
197 }
198
199 let mut layers = SumTree::new();
200 let mut edits_for_depth = &edits[..];
201 let mut cursor = self.layers.cursor::<SyntaxLayerSummary>();
202 cursor.next(text);
203
204 'outer: loop {
205 let depth = cursor.end(text).max_depth;
206
207 // Preserve any layers at this depth that precede the first edit.
208 if let Some(first_edit) = edits_for_depth.first() {
209 let target = DepthAndMaxPosition(depth, text.anchor_before(first_edit.new.start.0));
210 if target.cmp(&cursor.start(), text).is_gt() {
211 let slice = cursor.slice(&target, Bias::Left, text);
212 layers.push_tree(slice, text);
213 }
214 }
215 // If this layer follows all of the edits, then preserve it and any
216 // subsequent layers at this same depth.
217 else {
218 let slice = cursor.slice(
219 &DepthAndRange(depth + 1, Anchor::MIN..Anchor::MAX),
220 Bias::Left,
221 text,
222 );
223 layers.push_tree(slice, text);
224 edits_for_depth = &edits[..];
225 continue;
226 };
227
228 let layer = if let Some(layer) = cursor.item() {
229 layer
230 } else {
231 break;
232 };
233
234 let mut endpoints = text
235 .summaries_for_anchors::<(usize, Point), _>([&layer.range.start, &layer.range.end]);
236 let layer_range = endpoints.next().unwrap()..endpoints.next().unwrap();
237 let start_byte = layer_range.start.0;
238 let start_point = layer_range.start.1;
239 let end_byte = layer_range.end.0;
240
241 // Ignore edits that end before the start of this layer, and don't consider them
242 // for any subsequent layers at this same depth.
243 loop {
244 if let Some(edit) = edits_for_depth.first() {
245 if edit.new.end.0 < start_byte {
246 edits_for_depth = &edits_for_depth[1..];
247 } else {
248 break;
249 }
250 } else {
251 continue 'outer;
252 }
253 }
254
255 let mut layer = layer.clone();
256 for edit in edits_for_depth {
257 // Ignore any edits that follow this layer.
258 if edit.new.start.0 > end_byte {
259 break;
260 }
261
262 // Apply any edits that intersect this layer to the layer's syntax tree.
263 let tree_edit = if edit.new.start.0 >= start_byte {
264 tree_sitter::InputEdit {
265 start_byte: edit.new.start.0 - start_byte,
266 old_end_byte: edit.new.start.0 - start_byte
267 + (edit.old.end.0 - edit.old.start.0),
268 new_end_byte: edit.new.end.0 - start_byte,
269 start_position: (edit.new.start.1 - start_point).to_ts_point(),
270 old_end_position: (edit.new.start.1 - start_point
271 + (edit.old.end.1 - edit.old.start.1))
272 .to_ts_point(),
273 new_end_position: (edit.new.end.1 - start_point).to_ts_point(),
274 }
275 } else {
276 tree_sitter::InputEdit {
277 start_byte: 0,
278 old_end_byte: edit.new.end.0 - start_byte,
279 new_end_byte: 0,
280 start_position: Default::default(),
281 old_end_position: (edit.new.end.1 - start_point).to_ts_point(),
282 new_end_position: Default::default(),
283 }
284 };
285
286 layer.tree.edit(&tree_edit);
287
288 if edit.new.start.0 < start_byte {
289 break;
290 }
291 }
292
            debug_assert!(
                layer.tree.root_node().end_byte() <= text.len(),
                "tree's size {} is larger than text size {}",
                layer.tree.root_node().end_byte(),
                text.len(),
            );
299
300 layers.push(layer, text);
301 cursor.next(text);
302 }
303
304 layers.push_tree(cursor.suffix(&text), &text);
305 drop(cursor);
306 self.layers = layers;
307 }
308
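    /// Rebuild the syntax layers for the current buffer contents. Layers that
    /// are unaffected by the edits since `from_version` are reused; affected
    /// layers are reparsed (incrementally where possible), and any injections
    /// found in their changed ranges are queued so that nested layers are
    /// rebuilt as well.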
309 pub fn reparse(
310 &mut self,
311 from_version: &clock::Global,
312 text: &BufferSnapshot,
313 registry: Option<Arc<LanguageRegistry>>,
314 language: Arc<Language>,
315 ) {
316 let edits = text.edits_since::<usize>(from_version).collect::<Vec<_>>();
317 let max_depth = self.layers.summary().max_depth;
318 let mut cursor = self.layers.cursor::<SyntaxLayerSummary>();
319 cursor.next(&text);
320 let mut layers = SumTree::new();
321
322 let mut changed_regions = ChangeRegionSet::default();
323 let mut queue = BinaryHeap::new();
324 queue.push(ReparseStep {
325 depth: 0,
326 language: language.clone(),
327 ranges: Vec::new(),
328 range: Anchor::MIN..Anchor::MAX,
329 });
330
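        // Process the queue in order of increasing depth and position. For each
        // step, copy over any existing layers that precede it and were not
        // invalidated, then parse (or incrementally reparse) the step's layer
        // and enqueue the injections found in its changed ranges.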
331 loop {
332 let step = queue.pop();
333 let (depth, range) = if let Some(step) = &step {
334 (step.depth, step.range.clone())
335 } else {
336 (max_depth + 1, Anchor::MAX..Anchor::MAX)
337 };
338
339 let target = DepthAndRange(depth, range.clone());
340 let mut done = cursor.item().is_none();
341 while !done && target.cmp(&cursor.end(text), &text).is_gt() {
342 done = true;
343
344 let bounded_target =
345 DepthAndRangeOrMaxPosition(target.clone(), changed_regions.start_position());
346 if bounded_target.cmp(&cursor.start(), &text).is_gt() {
347 let slice = cursor.slice(&bounded_target, Bias::Left, text);
348 if !slice.is_empty() {
349 layers.push_tree(slice, &text);
350 if changed_regions.prune(cursor.end(text), text) {
351 done = false;
352 }
353 }
354 }
355
356 while target.cmp(&cursor.end(text), text).is_gt() {
357 let layer = if let Some(layer) = cursor.item() {
358 layer
359 } else {
360 break;
361 };
362
363 if changed_regions.intersects(&layer, text) {
364 changed_regions.insert(
365 ChangedRegion {
366 depth: depth + 1,
367 range: layer.range.clone(),
368 },
369 text,
370 );
371 } else {
372 layers.push(layer.clone(), text);
373 }
374
375 cursor.next(text);
376 if changed_regions.prune(cursor.end(text), text) {
377 done = false;
378 }
379 }
380 }
381
382 let (ranges, language) = if let Some(step) = step {
383 (step.ranges, step.language)
384 } else {
385 break;
386 };
387
388 let start_point;
389 let start_byte;
390 let end_byte;
391 if let Some((first, last)) = ranges.first().zip(ranges.last()) {
392 start_point = first.start_point;
393 start_byte = first.start_byte;
394 end_byte = last.end_byte;
395 } else {
396 start_point = Point::zero().to_ts_point();
397 start_byte = 0;
398 end_byte = text.len();
399 };
400
401 let mut old_layer = cursor.item();
402 if let Some(layer) = old_layer {
403 if layer.range.to_offset(text) == (start_byte..end_byte) {
404 cursor.next(&text);
405 } else {
406 old_layer = None;
407 }
408 }
409
410 let grammar = if let Some(grammar) = language.grammar.as_deref() {
411 grammar
412 } else {
413 continue;
414 };
415
416 let tree;
417 let changed_ranges;
418 if let Some(old_layer) = old_layer {
419 tree = parse_text(
420 grammar,
421 text.as_rope(),
422 Some(old_layer.tree.clone()),
423 ranges,
424 );
425 changed_ranges = join_ranges(
426 edits
427 .iter()
428 .map(|e| e.new.clone())
429 .filter(|range| range.start < end_byte && range.end > start_byte),
430 old_layer
431 .tree
432 .changed_ranges(&tree)
433 .map(|r| start_byte + r.start_byte..start_byte + r.end_byte),
434 );
435 } else {
436 tree = parse_text(grammar, text.as_rope(), None, ranges);
437 changed_ranges = vec![start_byte..end_byte];
438 }
439
440 layers.push(
441 SyntaxLayer {
442 depth,
443 range,
444 tree: tree.clone(),
445 language: language.clone(),
446 },
447 &text,
448 );
449
450 if let (Some((config, registry)), false) = (
451 grammar.injection_config.as_ref().zip(registry.as_ref()),
452 changed_ranges.is_empty(),
453 ) {
454 let depth = depth + 1;
455 for range in &changed_ranges {
456 changed_regions.insert(
457 ChangedRegion {
458 depth,
459 range: text.anchor_before(range.start)..text.anchor_after(range.end),
460 },
461 text,
462 );
463 }
464 get_injections(
465 config,
466 text,
467 tree.root_node_with_offset(start_byte, start_point),
468 registry,
469 depth,
470 &changed_ranges,
471 &mut queue,
472 );
473 }
474 }
475
476 drop(cursor);
477 self.layers = layers;
478 }
479
480 pub fn single_tree_captures<'a>(
481 range: Range<usize>,
482 text: &'a Rope,
483 tree: &'a Tree,
484 grammar: &'a Grammar,
485 query: fn(&Grammar) -> Option<&Query>,
486 ) -> SyntaxMapCaptures<'a> {
487 SyntaxMapCaptures::new(
488 range.clone(),
489 text,
490 [(grammar, 0, tree.root_node())].into_iter(),
491 query,
492 )
493 }
494
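    /// Run the given query (for example, a grammar's highlights query) against
    /// every layer that overlaps `range`, returning a merged, ordered capture
    /// iterator.
    ///
    /// A sketch of how a caller might consume it, following
    /// `assert_capture_ranges` in the tests below:
    ///
    /// ```ignore
    /// let captures = syntax_map.captures(0..buffer.len(), &buffer, |grammar| {
    ///     grammar.highlights_query.as_ref()
    /// });
    /// let grammars = captures.grammars().to_vec();
    /// for capture in captures {
    ///     // grammars[capture.grammar_index], capture.node, capture.index, ...
    /// }
    /// ```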
495 pub fn captures<'a>(
496 &'a self,
497 range: Range<usize>,
498 buffer: &'a BufferSnapshot,
499 query: fn(&Grammar) -> Option<&Query>,
500 ) -> SyntaxMapCaptures {
501 SyntaxMapCaptures::new(
502 range.clone(),
503 buffer.as_rope(),
504 self.layers_for_range(range, buffer).into_iter(),
505 query,
506 )
507 }
508
509 pub fn matches<'a>(
510 &'a self,
511 range: Range<usize>,
512 buffer: &'a BufferSnapshot,
513 query: fn(&Grammar) -> Option<&Query>,
514 ) -> SyntaxMapMatches {
515 SyntaxMapMatches::new(
516 range.clone(),
517 buffer.as_rope(),
518 self.layers_for_range(range, buffer).into_iter(),
519 query,
520 )
521 }
522
523 #[cfg(test)]
524 pub fn layers(&self, buffer: &BufferSnapshot) -> Vec<(&Grammar, usize, Node)> {
525 self.layers_for_range(0..buffer.len(), buffer)
526 }
527
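    /// Collect the (grammar, depth, root node) of every layer that overlaps the
    /// given range, shallowest layers first. The returned nodes are offset so
    /// that their positions are expressed in buffer coordinates rather than
    /// layer-local coordinates.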
528 pub fn layers_for_range<'a, T: ToOffset>(
529 &self,
530 range: Range<T>,
531 buffer: &BufferSnapshot,
532 ) -> Vec<(&Grammar, usize, Node)> {
533 let start = buffer.anchor_before(range.start.to_offset(buffer));
534 let end = buffer.anchor_after(range.end.to_offset(buffer));
535
536 let mut cursor = self.layers.filter::<_, ()>(|summary| {
537 if summary.max_depth > summary.min_depth {
538 true
539 } else {
540 let is_before_start = summary.range.end.cmp(&start, buffer).is_lt();
541 let is_after_end = summary.range.start.cmp(&end, buffer).is_gt();
542 !is_before_start && !is_after_end
543 }
544 });
545
546 let mut result = Vec::new();
547 cursor.next(buffer);
548 while let Some(layer) = cursor.item() {
549 if let Some(grammar) = &layer.language.grammar {
550 result.push((
551 grammar.as_ref(),
552 layer.depth,
553 layer.tree.root_node_with_offset(
554 layer.range.start.to_offset(buffer),
555 layer.range.start.to_point(buffer).to_ts_point(),
556 ),
557 ));
558 }
559 cursor.next(buffer)
560 }
561
562 result
563 }
564}
565
566impl<'a> SyntaxMapCaptures<'a> {
567 fn new(
568 range: Range<usize>,
569 text: &'a Rope,
570 layers: impl Iterator<Item = (&'a Grammar, usize, Node<'a>)>,
571 query: fn(&Grammar) -> Option<&Query>,
572 ) -> Self {
573 let mut result = Self {
574 layers: Vec::new(),
575 grammars: Vec::new(),
576 active_layer_count: 0,
577 };
578 for (grammar, depth, node) in layers {
579 let query = if let Some(query) = query(grammar) {
580 query
581 } else {
582 continue;
583 };
584
585 let mut query_cursor = QueryCursorHandle::new();
586
587 // TODO - add a Tree-sitter API to remove the need for this.
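            // The `captures` iterator borrows this cursor, and both are stored in
            // the same `SyntaxMapCapturesLayer`, so the real lifetime is the
            // layer's. The transmute erases the borrow of the local variable, and
            // the handle is kept alive in `_query_cursor` alongside the iterator.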
588 let cursor = unsafe {
589 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
590 };
591
592 cursor.set_byte_range(range.clone());
593 let captures = cursor.captures(query, node, TextProvider(text));
594 let grammar_index = result
595 .grammars
596 .iter()
597 .position(|g| g.id == grammar.id())
598 .unwrap_or_else(|| {
599 result.grammars.push(grammar);
600 result.grammars.len() - 1
601 });
602 let mut layer = SyntaxMapCapturesLayer {
603 depth,
604 grammar_index,
605 next_capture: None,
606 captures,
607 _query_cursor: query_cursor,
608 };
609
610 layer.advance();
611 if layer.next_capture.is_some() {
612 let key = layer.sort_key();
613 let ix = match result.layers[..result.active_layer_count]
614 .binary_search_by_key(&key, |layer| layer.sort_key())
615 {
616 Ok(ix) | Err(ix) => ix,
617 };
618 result.layers.insert(ix, layer);
619 result.active_layer_count += 1;
620 } else {
621 result.layers.push(layer);
622 }
623 }
624
625 result
626 }
627
628 pub fn grammars(&self) -> &[&'a Grammar] {
629 &self.grammars
630 }
631
632 pub fn peek(&self) -> Option<SyntaxMapCapture<'a>> {
633 let layer = self.layers[..self.active_layer_count].first()?;
634 let capture = layer.next_capture?;
635 Some(SyntaxMapCapture {
636 depth: layer.depth,
637 grammar_index: layer.grammar_index,
638 index: capture.index,
639 node: capture.node,
640 })
641 }
642
643 pub fn advance(&mut self) -> bool {
644 let layer = if let Some(layer) = self.layers[..self.active_layer_count].first_mut() {
645 layer
646 } else {
647 return false;
648 };
649
650 layer.advance();
651 if layer.next_capture.is_some() {
652 let key = layer.sort_key();
653 let i = 1 + self.layers[1..self.active_layer_count]
654 .iter()
655 .position(|later_layer| key < later_layer.sort_key())
656 .unwrap_or(self.active_layer_count - 1);
657 self.layers[0..i].rotate_left(1);
658 } else {
659 self.layers[0..self.active_layer_count].rotate_left(1);
660 self.active_layer_count -= 1;
661 }
662
663 true
664 }
665
666 pub fn set_byte_range(&mut self, range: Range<usize>) {
667 for layer in &mut self.layers {
668 layer.captures.set_byte_range(range.clone());
669 if let Some(capture) = &layer.next_capture {
670 if capture.node.end_byte() > range.start {
671 continue;
672 }
673 }
674 layer.advance();
675 }
676 self.layers.sort_unstable_by_key(|layer| layer.sort_key());
677 self.active_layer_count = self
678 .layers
679 .iter()
680 .position(|layer| layer.next_capture.is_none())
681 .unwrap_or(self.layers.len());
682 }
683}
684
685impl<'a> SyntaxMapMatches<'a> {
686 fn new(
687 range: Range<usize>,
688 text: &'a Rope,
689 layers: impl Iterator<Item = (&'a Grammar, usize, Node<'a>)>,
690 query: fn(&Grammar) -> Option<&Query>,
691 ) -> Self {
692 let mut result = Self::default();
693 for (grammar, depth, node) in layers {
694 let query = if let Some(query) = query(grammar) {
695 query
696 } else {
697 continue;
698 };
699
700 let mut query_cursor = QueryCursorHandle::new();
701
702 // TODO - add a Tree-sitter API to remove the need for this.
703 let cursor = unsafe {
704 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
705 };
706
707 cursor.set_byte_range(range.clone());
708 let matches = cursor.matches(query, node, TextProvider(text));
709 let grammar_index = result
710 .grammars
711 .iter()
712 .position(|g| g.id == grammar.id())
713 .unwrap_or_else(|| {
714 result.grammars.push(grammar);
715 result.grammars.len() - 1
716 });
717 let mut layer = SyntaxMapMatchesLayer {
718 depth,
719 grammar_index,
720 matches,
721 next_pattern_index: 0,
722 next_captures: Vec::new(),
723 has_next: false,
724 _query_cursor: query_cursor,
725 };
726
727 layer.advance();
728 if layer.has_next {
729 let key = layer.sort_key();
730 let ix = match result.layers[..result.active_layer_count]
731 .binary_search_by_key(&key, |layer| layer.sort_key())
732 {
733 Ok(ix) | Err(ix) => ix,
734 };
735 result.layers.insert(ix, layer);
736 result.active_layer_count += 1;
737 } else {
738 result.layers.push(layer);
739 }
740 }
741 result
742 }
743
744 pub fn grammars(&self) -> &[&'a Grammar] {
745 &self.grammars
746 }
747
748 pub fn peek(&self) -> Option<SyntaxMapMatch> {
749 let layer = self.layers.first()?;
750 if !layer.has_next {
751 return None;
752 }
753 Some(SyntaxMapMatch {
754 depth: layer.depth,
755 grammar_index: layer.grammar_index,
756 pattern_index: layer.next_pattern_index,
757 captures: &layer.next_captures,
758 })
759 }
760
761 pub fn advance(&mut self) -> bool {
762 let layer = if let Some(layer) = self.layers.first_mut() {
763 layer
764 } else {
765 return false;
766 };
767
768 layer.advance();
769 if layer.has_next {
770 let key = layer.sort_key();
771 let i = 1 + self.layers[1..self.active_layer_count]
772 .iter()
773 .position(|later_layer| key < later_layer.sort_key())
774 .unwrap_or(self.active_layer_count - 1);
775 self.layers[0..i].rotate_left(1);
776 } else {
777 self.layers[0..self.active_layer_count].rotate_left(1);
778 self.active_layer_count -= 1;
779 }
780
781 true
782 }
783}
784
785impl<'a> SyntaxMapCapturesLayer<'a> {
786 fn advance(&mut self) {
787 self.next_capture = self.captures.next().map(|(mat, ix)| mat.captures[ix]);
788 }
789
790 fn sort_key(&self) -> (usize, Reverse<usize>, usize) {
791 if let Some(capture) = &self.next_capture {
792 let range = capture.node.byte_range();
793 (range.start, Reverse(range.end), self.depth)
794 } else {
795 (usize::MAX, Reverse(0), usize::MAX)
796 }
797 }
798}
799
800impl<'a> SyntaxMapMatchesLayer<'a> {
801 fn advance(&mut self) {
802 if let Some(mat) = self.matches.next() {
803 self.next_captures.clear();
804 self.next_captures.extend_from_slice(&mat.captures);
805 self.next_pattern_index = mat.pattern_index;
806 self.has_next = true;
807 } else {
808 self.has_next = false;
809 }
810 }
811
812 fn sort_key(&self) -> (usize, Reverse<usize>, usize) {
813 if self.has_next {
814 let captures = &self.next_captures;
815 if let Some((first, last)) = captures.first().zip(captures.last()) {
816 return (
817 first.node.start_byte(),
818 Reverse(last.node.end_byte()),
819 self.depth,
820 );
821 }
822 }
823 (usize::MAX, Reverse(0), usize::MAX)
824 }
825}
826
827impl<'a> Iterator for SyntaxMapCaptures<'a> {
828 type Item = SyntaxMapCapture<'a>;
829
830 fn next(&mut self) -> Option<Self::Item> {
831 let result = self.peek();
832 self.advance();
833 result
834 }
835}
836
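/// Merge two sorted iterators of ranges into a single sorted list, coalescing
/// ranges that overlap or touch.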
837fn join_ranges(
838 a: impl Iterator<Item = Range<usize>>,
839 b: impl Iterator<Item = Range<usize>>,
840) -> Vec<Range<usize>> {
841 let mut result = Vec::<Range<usize>>::new();
842 let mut a = a.peekable();
843 let mut b = b.peekable();
844 loop {
845 let range = match (a.peek(), b.peek()) {
846 (Some(range_a), Some(range_b)) => {
847 if range_a.start < range_b.start {
848 a.next().unwrap()
849 } else {
850 b.next().unwrap()
851 }
852 }
853 (None, Some(_)) => b.next().unwrap(),
854 (Some(_), None) => a.next().unwrap(),
855 (None, None) => break,
856 };
857
858 if let Some(last) = result.last_mut() {
859 if range.start <= last.end {
860 last.end = last.end.max(range.end);
861 continue;
862 }
863 }
864 result.push(range);
865 }
866 result
867}
868
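/// Parse `text` with the given grammar, restricted to `ranges`. The ranges are
/// shifted so that the first one starts at offset zero, which is why the
/// resulting tree is in layer-local coordinates and callers offset its root
/// node (via `root_node_with_offset`) when exposing it.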
869fn parse_text(
870 grammar: &Grammar,
871 text: &Rope,
872 old_tree: Option<Tree>,
873 mut ranges: Vec<tree_sitter::Range>,
874) -> Tree {
875 let (start_byte, start_point) = ranges
876 .first()
877 .map(|range| (range.start_byte, Point::from_ts_point(range.start_point)))
878 .unwrap_or_default();
879
880 for range in &mut ranges {
881 range.start_byte -= start_byte;
882 range.end_byte -= start_byte;
883 range.start_point = (Point::from_ts_point(range.start_point) - start_point).to_ts_point();
884 range.end_point = (Point::from_ts_point(range.end_point) - start_point).to_ts_point();
885 }
886
887 PARSER.with(|parser| {
888 let mut parser = parser.borrow_mut();
889 let mut chunks = text.chunks_in_range(start_byte..text.len());
890 parser
891 .set_included_ranges(&ranges)
892 .expect("overlapping ranges");
893 parser
894 .set_language(grammar.ts_language)
895 .expect("incompatible grammar");
896 parser
897 .parse_with(
898 &mut move |offset, _| {
899 chunks.seek(start_byte + offset);
900 chunks.next().unwrap_or("").as_bytes()
901 },
902 old_tree.as_ref(),
903 )
904 .expect("invalid language")
905 })
906}
907
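/// Run the language's injection query over the given ranges and push a
/// `ReparseStep` onto the queue for each injection whose language can be
/// resolved through the registry. Returns true if any step was enqueued.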
908fn get_injections(
909 config: &InjectionConfig,
910 text: &BufferSnapshot,
911 node: Node,
912 language_registry: &LanguageRegistry,
913 depth: usize,
914 query_ranges: &[Range<usize>],
915 queue: &mut BinaryHeap<ReparseStep>,
916) -> bool {
917 let mut result = false;
918 let mut query_cursor = QueryCursorHandle::new();
919 let mut prev_match = None;
920 for query_range in query_ranges {
921 query_cursor.set_byte_range(query_range.start..query_range.end);
922 for mat in query_cursor.matches(&config.query, node, TextProvider(text.as_rope())) {
923 let content_ranges = mat
924 .nodes_for_capture_index(config.content_capture_ix)
925 .map(|node| node.range())
926 .collect::<Vec<_>>();
927 if content_ranges.is_empty() {
928 continue;
929 }
930
931 // Avoid duplicate matches if two changed ranges intersect the same injection.
932 let content_range =
933 content_ranges.first().unwrap().start_byte..content_ranges.last().unwrap().end_byte;
934 if let Some((last_pattern_ix, last_range)) = &prev_match {
935 if mat.pattern_index == *last_pattern_ix && content_range == *last_range {
936 continue;
937 }
938 }
939 prev_match = Some((mat.pattern_index, content_range.clone()));
940
941 let language_name = config.languages_by_pattern_ix[mat.pattern_index]
942 .as_ref()
943 .map(|s| Cow::Borrowed(s.as_ref()))
944 .or_else(|| {
945 let ix = config.language_capture_ix?;
946 let node = mat.nodes_for_capture_index(ix).next()?;
947 Some(Cow::Owned(text.text_for_range(node.byte_range()).collect()))
948 });
949
950 if let Some(language_name) = language_name {
951 if let Some(language) = language_registry.get_language(language_name.as_ref()) {
952 result = true;
953 let range = text.anchor_before(content_range.start)
954 ..text.anchor_after(content_range.end);
955 queue.push(ReparseStep {
956 depth,
957 language,
958 ranges: content_ranges,
959 range,
960 })
961 }
962 }
963 }
964 }
965 result
966}
967
968impl std::ops::Deref for SyntaxMap {
969 type Target = SyntaxSnapshot;
970
971 fn deref(&self) -> &Self::Target {
972 &self.snapshot
973 }
974}
975
976impl PartialEq for ReparseStep {
977 fn eq(&self, _: &Self) -> bool {
978 false
979 }
980}
981
982impl Eq for ReparseStep {}
983
impl PartialOrd for ReparseStep {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
989
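// `ReparseStep`s are stored in a max-heap, so the ordering is reversed on depth
// and start position: shallower steps and earlier ranges are popped first.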
990impl Ord for ReparseStep {
991 fn cmp(&self, other: &Self) -> Ordering {
992 let range_a = self.range();
993 let range_b = other.range();
994 Ord::cmp(&other.depth, &self.depth)
995 .then_with(|| Ord::cmp(&range_b.start, &range_a.start))
996 .then_with(|| Ord::cmp(&range_a.end, &range_b.end))
997 }
998}
999
1000impl ReparseStep {
1001 fn range(&self) -> Range<usize> {
1002 let start = self.ranges.first().map_or(0, |r| r.start_byte);
1003 let end = self.ranges.last().map_or(0, |r| r.end_byte);
1004 start..end
1005 }
1006}
1007
1008impl ChangedRegion {
1009 fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering {
1010 let range_a = &self.range;
1011 let range_b = &other.range;
1012 Ord::cmp(&self.depth, &other.depth)
1013 .then_with(|| range_a.start.cmp(&range_b.start, buffer))
1014 .then_with(|| range_b.end.cmp(&range_a.end, buffer))
1015 }
1016}
1017
1018impl ChangeRegionSet {
1019 fn start_position(&self) -> DepthAndMaxPosition {
1020 self.0
1021 .first()
1022 .map_or(DepthAndMaxPosition(usize::MAX, Anchor::MAX), |region| {
1023 DepthAndMaxPosition(region.depth, region.range.start)
1024 })
1025 }
1026
1027 fn intersects(&self, layer: &SyntaxLayer, text: &BufferSnapshot) -> bool {
1028 for region in &self.0 {
1029 if region.depth < layer.depth {
1030 continue;
1031 }
1032 if region.depth > layer.depth {
1033 break;
1034 }
1035 if region.range.end.cmp(&layer.range.start, text).is_le() {
1036 continue;
1037 }
1038 if region.range.start.cmp(&layer.range.end, text).is_ge() {
1039 break;
1040 }
1041 return true;
1042 }
1043 false
1044 }
1045
    fn insert(&mut self, region: ChangedRegion, text: &BufferSnapshot) {
        if let Err(ix) = self.0.binary_search_by(|probe| probe.cmp(&region, text)) {
            self.0.insert(ix, region);
        }
    }
1051
1052 fn prune(&mut self, summary: SyntaxLayerSummary, text: &BufferSnapshot) -> bool {
1053 let prev_len = self.0.len();
1054 self.0.retain(|region| {
1055 region.depth > summary.max_depth
1056 || (region.depth == summary.max_depth
1057 && region
1058 .range
1059 .end
1060 .cmp(&summary.last_layer_range.start, text)
1061 .is_gt())
1062 });
1063 self.0.len() < prev_len
1064 }
1065}
1066
1067impl Default for SyntaxLayerSummary {
1068 fn default() -> Self {
1069 Self {
1070 max_depth: 0,
1071 min_depth: 0,
1072 range: Anchor::MAX..Anchor::MIN,
1073 last_layer_range: Anchor::MIN..Anchor::MAX,
1074 }
1075 }
1076}
1077
1078impl sum_tree::Summary for SyntaxLayerSummary {
1079 type Context = BufferSnapshot;
1080
1081 fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
1082 if other.max_depth > self.max_depth {
1083 self.max_depth = other.max_depth;
1084 self.range = other.range.clone();
1085 } else {
1086 if other.range.start.cmp(&self.range.start, buffer).is_lt() {
1087 self.range.start = other.range.start;
1088 }
1089 if other.range.end.cmp(&self.range.end, buffer).is_gt() {
1090 self.range.end = other.range.end;
1091 }
1092 }
1093 self.last_layer_range = other.last_layer_range.clone();
1094 }
1095}
1096
1097impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for DepthAndRange {
1098 fn cmp(&self, cursor_location: &SyntaxLayerSummary, buffer: &BufferSnapshot) -> Ordering {
1099 Ord::cmp(&self.0, &cursor_location.max_depth)
1100 .then_with(|| {
1101 self.1
1102 .start
1103 .cmp(&cursor_location.last_layer_range.start, buffer)
1104 })
1105 .then_with(|| {
1106 cursor_location
1107 .last_layer_range
1108 .end
1109 .cmp(&self.1.end, buffer)
1110 })
1111 }
1112}
1113
1114impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for DepthAndMaxPosition {
1115 fn cmp(&self, cursor_location: &SyntaxLayerSummary, text: &BufferSnapshot) -> Ordering {
1116 Ord::cmp(&self.0, &cursor_location.max_depth)
1117 .then_with(|| self.1.cmp(&cursor_location.range.end, text))
1118 }
1119}
1120
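// This target is reached as soon as either the depth-and-range target or the
// bounded position is reached, so slices of reusable layers stop at the next
// changed region.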
impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for DepthAndRangeOrMaxPosition {
    fn cmp(&self, cursor_location: &SyntaxLayerSummary, buffer: &BufferSnapshot) -> Ordering {
        if self.1.cmp(cursor_location, buffer).is_le() {
            Ordering::Less
        } else {
            self.0.cmp(cursor_location, buffer)
        }
    }
}
1130
1131impl sum_tree::Item for SyntaxLayer {
1132 type Summary = SyntaxLayerSummary;
1133
1134 fn summary(&self) -> Self::Summary {
1135 SyntaxLayerSummary {
1136 min_depth: self.depth,
1137 max_depth: self.depth,
1138 range: self.range.clone(),
1139 last_layer_range: self.range.clone(),
1140 }
1141 }
1142}
1143
1144impl std::fmt::Debug for SyntaxLayer {
1145 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1146 f.debug_struct("SyntaxLayer")
1147 .field("depth", &self.depth)
1148 .field("range", &self.range)
1149 .field("tree", &self.tree)
1150 .finish()
1151 }
1152}
1153
1154impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
1155 type I = ByteChunks<'a>;
1156
1157 fn text(&mut self, node: tree_sitter::Node) -> Self::I {
1158 ByteChunks(self.0.chunks_in_range(node.byte_range()))
1159 }
1160}
1161
1162impl<'a> Iterator for ByteChunks<'a> {
1163 type Item = &'a [u8];
1164
1165 fn next(&mut self) -> Option<Self::Item> {
1166 self.0.next().map(str::as_bytes)
1167 }
1168}
1169
1170impl QueryCursorHandle {
1171 pub(crate) fn new() -> Self {
1172 let mut cursor = QUERY_CURSORS.lock().pop().unwrap_or_else(QueryCursor::new);
1173 cursor.set_match_limit(64);
1174 QueryCursorHandle(Some(cursor))
1175 }
1176}
1177
1178impl Deref for QueryCursorHandle {
1179 type Target = QueryCursor;
1180
1181 fn deref(&self) -> &Self::Target {
1182 self.0.as_ref().unwrap()
1183 }
1184}
1185
1186impl DerefMut for QueryCursorHandle {
1187 fn deref_mut(&mut self) -> &mut Self::Target {
1188 self.0.as_mut().unwrap()
1189 }
1190}
1191
1192impl Drop for QueryCursorHandle {
1193 fn drop(&mut self) {
1194 let mut cursor = self.0.take().unwrap();
1195 cursor.set_byte_range(0..usize::MAX);
1196 cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
1197 QUERY_CURSORS.lock().push(cursor)
1198 }
1199}
1200
1201pub(crate) trait ToTreeSitterPoint {
1202 fn to_ts_point(self) -> tree_sitter::Point;
1203 fn from_ts_point(point: tree_sitter::Point) -> Self;
1204}
1205
1206impl ToTreeSitterPoint for Point {
1207 fn to_ts_point(self) -> tree_sitter::Point {
1208 tree_sitter::Point::new(self.row as usize, self.column as usize)
1209 }
1210
1211 fn from_ts_point(point: tree_sitter::Point) -> Self {
1212 Point::new(point.row as u32, point.column as u32)
1213 }
1214}
1215
1216#[cfg(test)]
1217mod tests {
1218 use super::*;
1219 use crate::LanguageConfig;
1220 use text::{Buffer, Point};
1221 use unindent::Unindent as _;
1222 use util::test::marked_text_ranges;
1223
1224 #[gpui::test]
1225 fn test_syntax_map_layers_for_range() {
1226 let registry = Arc::new(LanguageRegistry::test());
1227 let language = Arc::new(rust_lang());
1228 registry.add(language.clone());
1229
1230 let mut buffer = Buffer::new(
1231 0,
1232 0,
1233 r#"
1234 fn a() {
1235 assert_eq!(
1236 b(vec![C {}]),
1237 vec![d.e],
1238 );
1239 println!("{}", f(|_| true));
1240 }
1241 "#
1242 .unindent(),
1243 );
1244
1245 let mut syntax_map = SyntaxMap::new();
1246 syntax_map.set_language_registry(registry.clone());
1247 syntax_map.reparse(language.clone(), &buffer);
1248
1249 assert_layers_for_range(
1250 &syntax_map,
1251 &buffer,
1252 Point::new(2, 0)..Point::new(2, 0),
1253 &[
1254 "...(function_item ... (block (expression_statement (macro_invocation...",
1255 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1256 ],
1257 );
1258 assert_layers_for_range(
1259 &syntax_map,
1260 &buffer,
1261 Point::new(2, 14)..Point::new(2, 16),
1262 &[
1263 "...(function_item ...",
1264 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1265 "...(array_expression (struct_expression ...",
1266 ],
1267 );
1268 assert_layers_for_range(
1269 &syntax_map,
1270 &buffer,
1271 Point::new(3, 14)..Point::new(3, 16),
1272 &[
1273 "...(function_item ...",
1274 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1275 "...(array_expression (field_expression ...",
1276 ],
1277 );
1278 assert_layers_for_range(
1279 &syntax_map,
1280 &buffer,
1281 Point::new(5, 12)..Point::new(5, 16),
1282 &[
1283 "...(function_item ...",
1284 "...(call_expression ... (arguments (closure_expression ...",
1285 ],
1286 );
1287
1288 // Replace a vec! macro invocation with a plain slice, removing a syntactic layer.
1289 let macro_name_range = range_for_text(&buffer, "vec!");
1290 buffer.edit([(macro_name_range, "&")]);
1291 syntax_map.interpolate(&buffer);
1292 syntax_map.reparse(language.clone(), &buffer);
1293
1294 assert_layers_for_range(
1295 &syntax_map,
1296 &buffer,
1297 Point::new(2, 14)..Point::new(2, 16),
1298 &[
1299 "...(function_item ...",
1300 "...(tuple_expression (call_expression ... arguments: (arguments (reference_expression value: (array_expression...",
1301 ],
1302 );
1303
1304 // Put the vec! macro back, adding back the syntactic layer.
1305 buffer.undo();
1306 syntax_map.interpolate(&buffer);
1307 syntax_map.reparse(language.clone(), &buffer);
1308
1309 assert_layers_for_range(
1310 &syntax_map,
1311 &buffer,
1312 Point::new(2, 14)..Point::new(2, 16),
1313 &[
1314 "...(function_item ...",
1315 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1316 "...(array_expression (struct_expression ...",
1317 ],
1318 );
1319 }
1320
1321 #[gpui::test]
1322 fn test_typing_multiple_new_injections() {
1323 let (buffer, syntax_map) = test_edit_sequence(&[
1324 "fn a() { dbg }",
1325 "fn a() { dbg«!» }",
1326 "fn a() { dbg!«()» }",
1327 "fn a() { dbg!(«b») }",
1328 "fn a() { dbg!(b«.») }",
1329 "fn a() { dbg!(b.«c») }",
1330 "fn a() { dbg!(b.c«()») }",
1331 "fn a() { dbg!(b.c(«vec»)) }",
1332 "fn a() { dbg!(b.c(vec«!»)) }",
1333 "fn a() { dbg!(b.c(vec!«[]»)) }",
1334 "fn a() { dbg!(b.c(vec![«d»])) }",
1335 "fn a() { dbg!(b.c(vec![d«.»])) }",
1336 "fn a() { dbg!(b.c(vec![d.«e»])) }",
1337 ]);
1338
1339 assert_capture_ranges(
1340 &syntax_map,
1341 &buffer,
1342 &["field"],
1343 "fn a() { dbg!(b.«c»(vec![d.«e»])) }",
1344 );
1345 }
1346
1347 #[gpui::test]
1348 fn test_pasting_new_injection_line_between_others() {
1349 let (buffer, syntax_map) = test_edit_sequence(&[
1350 "
1351 fn a() {
1352 b!(B {});
1353 c!(C {});
1354 d!(D {});
1355 e!(E {});
1356 f!(F {});
1357 g!(G {});
1358 }
1359 ",
1360 "
1361 fn a() {
1362 b!(B {});
1363 c!(C {});
1364 d!(D {});
1365 « h!(H {});
1366 » e!(E {});
1367 f!(F {});
1368 g!(G {});
1369 }
1370 ",
1371 ]);
1372
1373 assert_capture_ranges(
1374 &syntax_map,
1375 &buffer,
1376 &["struct"],
1377 "
1378 fn a() {
1379 b!(«B {}»);
1380 c!(«C {}»);
1381 d!(«D {}»);
1382 h!(«H {}»);
1383 e!(«E {}»);
1384 f!(«F {}»);
1385 g!(«G {}»);
1386 }
1387 ",
1388 );
1389 }
1390
1391 #[gpui::test]
1392 fn test_joining_injections_with_child_injections() {
1393 let (buffer, syntax_map) = test_edit_sequence(&[
1394 "
1395 fn a() {
1396 b!(
1397 c![one.two.three],
1398 d![four.five.six],
1399 );
1400 e!(
1401 f![seven.eight],
1402 );
1403 }
1404 ",
1405 "
1406 fn a() {
1407 b!(
1408 c![one.two.three],
1409 d![four.five.six],
1410 ˇ f![seven.eight],
1411 );
1412 }
1413 ",
1414 ]);
1415
1416 assert_capture_ranges(
1417 &syntax_map,
1418 &buffer,
1419 &["field"],
1420 "
1421 fn a() {
1422 b!(
1423 c![one.«two».«three»],
1424 d![four.«five».«six»],
1425 f![seven.«eight»],
1426 );
1427 }
1428 ",
1429 );
1430 }
1431
1432 #[gpui::test]
1433 fn test_editing_edges_of_injection() {
1434 test_edit_sequence(&[
1435 "
1436 fn a() {
1437 b!(c!())
1438 }
1439 ",
1440 "
1441 fn a() {
1442 «d»!(c!())
1443 }
1444 ",
1445 "
1446 fn a() {
1447 «e»d!(c!())
1448 }
1449 ",
1450 "
1451 fn a() {
1452 ed!«[»c!()«]»
1453 }
1454 ",
1455 ]);
1456 }
1457
1458 #[gpui::test]
1459 fn test_edits_preceding_and_intersecting_injection() {
1460 test_edit_sequence(&[
1461 //
1462 "const aaaaaaaaaaaa: B = c!(d(e.f));",
1463 "const aˇa: B = c!(d(eˇ));",
1464 ]);
1465 }
1466
1467 #[gpui::test]
1468 fn test_non_local_changes_create_injections() {
1469 test_edit_sequence(&[
1470 "
1471 // a! {
1472 static B: C = d;
1473 // }
1474 ",
1475 "
1476 ˇa! {
1477 static B: C = d;
1478 ˇ}
1479 ",
1480 ]);
1481 }
1482
1483 #[gpui::test]
1484 fn test_creating_many_injections_in_one_edit() {
1485 test_edit_sequence(&[
1486 "
1487 fn a() {
1488 one(Two::three(3));
1489 four(Five::six(6));
1490 seven(Eight::nine(9));
1491 }
1492 ",
1493 "
1494 fn a() {
1495 one«!»(Two::three(3));
1496 four«!»(Five::six(6));
1497 seven«!»(Eight::nine(9));
1498 }
1499 ",
1500 "
1501 fn a() {
1502 one!(Two::three«!»(3));
1503 four!(Five::six«!»(6));
1504 seven!(Eight::nine«!»(9));
1505 }
1506 ",
1507 ]);
1508 }
1509
1510 #[gpui::test]
1511 fn test_editing_across_injection_boundary() {
1512 test_edit_sequence(&[
1513 "
1514 fn one() {
1515 two();
1516 three!(
1517 three.four,
1518 five.six,
1519 );
1520 }
1521 ",
1522 "
1523 fn one() {
1524 two();
1525 th«irty_five![»
1526 three.four,
1527 five.six,
1528 « seven.eight,
1529 ];»
1530 }
1531 ",
1532 ]);
1533 }
1534
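    // Applies each marked-up step to the buffer, updating one syntax map
    // incrementally (interpolate + reparse) and building a second map from
    // scratch, then asserts that both produce the same layers.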
1535 fn test_edit_sequence(steps: &[&str]) -> (Buffer, SyntaxMap) {
1536 let registry = Arc::new(LanguageRegistry::test());
1537 let language = Arc::new(rust_lang());
1538 registry.add(language.clone());
1539 let mut buffer = Buffer::new(0, 0, Default::default());
1540
1541 let mut mutated_syntax_map = SyntaxMap::new();
1542 mutated_syntax_map.set_language_registry(registry.clone());
1543 mutated_syntax_map.reparse(language.clone(), &buffer);
1544
1545 for (i, marked_string) in steps.into_iter().enumerate() {
1546 edit_buffer(&mut buffer, &marked_string.unindent());
1547
1548 // Reparse the syntax map
1549 mutated_syntax_map.interpolate(&buffer);
1550 mutated_syntax_map.reparse(language.clone(), &buffer);
1551
1552 // Create a second syntax map from scratch
1553 let mut reference_syntax_map = SyntaxMap::new();
1554 reference_syntax_map.set_language_registry(registry.clone());
1555 reference_syntax_map.reparse(language.clone(), &buffer);
1556
1557 // Compare the mutated syntax map to the new syntax map
1558 let mutated_layers = mutated_syntax_map.layers(&buffer);
1559 let reference_layers = reference_syntax_map.layers(&buffer);
1560 assert_eq!(
1561 mutated_layers.len(),
1562 reference_layers.len(),
1563 "wrong number of layers at step {i}"
1564 );
1565 for (edited_layer, reference_layer) in
1566 mutated_layers.into_iter().zip(reference_layers.into_iter())
1567 {
1568 assert_eq!(
1569 edited_layer.2.to_sexp(),
1570 reference_layer.2.to_sexp(),
1571 "different layer at step {i}"
1572 );
1573 assert_eq!(
1574 edited_layer.2.range(),
1575 reference_layer.2.range(),
1576 "different layer at step {i}"
1577 );
1578 }
1579 }
1580
1581 (buffer, mutated_syntax_map)
1582 }
1583
1584 fn rust_lang() -> Language {
1585 Language::new(
1586 LanguageConfig {
1587 name: "Rust".into(),
1588 path_suffixes: vec!["rs".to_string()],
1589 ..Default::default()
1590 },
1591 Some(tree_sitter_rust::language()),
1592 )
1593 .with_highlights_query(
1594 r#"
1595 (field_identifier) @field
1596 (struct_expression) @struct
1597 "#,
1598 )
1599 .unwrap()
1600 .with_injection_query(
1601 r#"
1602 (macro_invocation
1603 (token_tree) @content
1604 (#set! "language" "rust"))
1605 "#,
1606 )
1607 .unwrap()
1608 }
1609
1610 fn range_for_text(buffer: &Buffer, text: &str) -> Range<usize> {
1611 let start = buffer.as_rope().to_string().find(text).unwrap();
1612 start..start + text.len()
1613 }
1614
1615 fn assert_layers_for_range(
1616 syntax_map: &SyntaxMap,
1617 buffer: &BufferSnapshot,
1618 range: Range<Point>,
1619 expected_layers: &[&str],
1620 ) {
1621 let layers = syntax_map.layers_for_range(range, &buffer);
1622 assert_eq!(
1623 layers.len(),
1624 expected_layers.len(),
1625 "wrong number of layers"
1626 );
1627 for (i, ((_, _, node), expected_s_exp)) in
1628 layers.iter().zip(expected_layers.iter()).enumerate()
1629 {
1630 let actual_s_exp = node.to_sexp();
1631 assert!(
1632 string_contains_sequence(
1633 &actual_s_exp,
1634 &expected_s_exp.split("...").collect::<Vec<_>>()
1635 ),
1636 "layer {i}:\n\nexpected: {expected_s_exp}\nactual: {actual_s_exp}",
1637 );
1638 }
1639 }
1640
1641 fn assert_capture_ranges(
1642 syntax_map: &SyntaxMap,
1643 buffer: &BufferSnapshot,
1644 highlight_query_capture_names: &[&str],
1645 marked_string: &str,
1646 ) {
1647 let mut actual_ranges = Vec::<Range<usize>>::new();
1648 let captures = syntax_map.captures(0..buffer.len(), buffer, |grammar| {
1649 grammar.highlights_query.as_ref()
1650 });
1651 let queries = captures
1652 .grammars()
1653 .iter()
1654 .map(|grammar| grammar.highlights_query.as_ref().unwrap())
1655 .collect::<Vec<_>>();
1656 for capture in captures {
1657 let name = &queries[capture.grammar_index].capture_names()[capture.index as usize];
1658 if highlight_query_capture_names.contains(&name.as_str()) {
1659 actual_ranges.push(capture.node.byte_range());
1660 }
1661 }
1662
1663 let (text, expected_ranges) = marked_text_ranges(&marked_string.unindent(), false);
1664 assert_eq!(text, buffer.text());
1665 assert_eq!(actual_ranges, expected_ranges);
1666 }
1667
1668 fn edit_buffer(buffer: &mut Buffer, marked_string: &str) {
1669 let old_text = buffer.text();
1670 let (new_text, mut ranges) = marked_text_ranges(marked_string, false);
1671 if ranges.is_empty() {
1672 ranges.push(0..new_text.len());
1673 }
1674
1675 assert_eq!(
1676 old_text[..ranges[0].start],
1677 new_text[..ranges[0].start],
1678 "invalid edit"
1679 );
1680
1681 let mut delta = 0;
1682 let mut edits = Vec::new();
1683 let mut ranges = ranges.into_iter().peekable();
1684
1685 while let Some(inserted_range) = ranges.next() {
1686 let new_start = inserted_range.start;
1687 let old_start = (new_start as isize - delta) as usize;
1688
1689 let following_text = if let Some(next_range) = ranges.peek() {
1690 &new_text[inserted_range.end..next_range.start]
1691 } else {
1692 &new_text[inserted_range.end..]
1693 };
1694
1695 let inserted_len = inserted_range.len();
1696 let deleted_len = old_text[old_start..]
1697 .find(following_text)
1698 .expect("invalid edit");
1699
1700 let old_range = old_start..old_start + deleted_len;
1701 edits.push((old_range, new_text[inserted_range].to_string()));
1702 delta += inserted_len as isize - deleted_len as isize;
1703 }
1704
1705 assert_eq!(
1706 old_text.len() as isize + delta,
1707 new_text.len() as isize,
1708 "invalid edit"
1709 );
1710
1711 buffer.edit(edits);
1712 }
1713
1714 pub fn string_contains_sequence(text: &str, parts: &[&str]) -> bool {
1715 let mut last_part_end = 0;
1716 for part in parts {
1717 if let Some(start_ix) = text[last_part_end..].find(part) {
1718 last_part_end = start_ix + part.len();
1719 } else {
1720 return false;
1721 }
1722 }
1723 true
1724 }
1725}