use crate::{Grammar, InjectionConfig, Language, LanguageRegistry};
use lazy_static::lazy_static;
use parking_lot::Mutex;
use std::{
    borrow::Cow,
    cell::RefCell,
    cmp::{Ordering, Reverse},
    collections::BinaryHeap,
    ops::{Deref, DerefMut, Range},
    sync::Arc,
};
use sum_tree::{Bias, SeekTarget, SumTree};
use text::{rope, Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
use tree_sitter::{
    Node, Parser, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree,
};

thread_local! {
    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}

lazy_static! {
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}

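/// Tracks the syntax trees for a buffer: a root layer for the buffer's primary
/// language plus any injected layers, along with the buffer versions at which
/// the snapshot was last interpolated and reparsed.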
#[derive(Default)]
pub struct SyntaxMap {
    parsed_version: clock::Global,
    interpolated_version: clock::Global,
    snapshot: SyntaxSnapshot,
    language_registry: Option<Arc<LanguageRegistry>>,
}

#[derive(Clone, Default)]
pub struct SyntaxSnapshot {
    layers: SumTree<SyntaxLayer>,
}

#[derive(Default)]
pub struct SyntaxMapCaptures<'a> {
    layers: Vec<SyntaxMapCapturesLayer<'a>>,
    active_layer_count: usize,
    grammars: Vec<&'a Grammar>,
}

#[derive(Default)]
pub struct SyntaxMapMatches<'a> {
    layers: Vec<SyntaxMapMatchesLayer<'a>>,
    active_layer_count: usize,
    grammars: Vec<&'a Grammar>,
}

#[derive(Debug)]
pub struct SyntaxMapCapture<'a> {
    pub depth: usize,
    pub node: Node<'a>,
    pub index: u32,
    pub grammar_index: usize,
}

#[derive(Debug)]
pub struct SyntaxMapMatch<'a> {
    pub depth: usize,
    pub pattern_index: usize,
    pub captures: &'a [QueryCapture<'a>],
    pub grammar_index: usize,
}

struct SyntaxMapCapturesLayer<'a> {
    depth: usize,
    captures: QueryCaptures<'a, 'a, TextProvider<'a>>,
    next_capture: Option<QueryCapture<'a>>,
    grammar_index: usize,
    _query_cursor: QueryCursorHandle,
}

struct SyntaxMapMatchesLayer<'a> {
    depth: usize,
    next_pattern_index: usize,
    next_captures: Vec<QueryCapture<'a>>,
    has_next: bool,
    matches: QueryMatches<'a, 'a, TextProvider<'a>>,
    grammar_index: usize,
    _query_cursor: QueryCursorHandle,
}

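/// A single parsed syntax tree, covering a range of the buffer at a given
/// injection depth. Depth 0 is the buffer's primary language; each injected
/// language adds one to the depth of the layer that contains it.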
#[derive(Clone)]
struct SyntaxLayer {
    depth: usize,
    range: Range<Anchor>,
    tree: tree_sitter::Tree,
    language: Arc<Language>,
}

#[derive(Debug, Clone)]
struct SyntaxLayerSummary {
    min_depth: usize,
    max_depth: usize,
    range: Range<Anchor>,
    last_layer_range: Range<Anchor>,
}

#[derive(Clone, Debug)]
struct DepthAndRange(usize, Range<Anchor>);

#[derive(Clone, Debug)]
struct DepthAndMaxPosition(usize, Anchor);

#[derive(Clone, Debug)]
struct DepthAndRangeOrMaxPosition(DepthAndRange, DepthAndMaxPosition);

struct ReparseStep {
    depth: usize,
    language: Arc<Language>,
    ranges: Vec<tree_sitter::Range>,
    range: Range<Anchor>,
}

#[derive(Debug, PartialEq, Eq)]
struct ChangedRegion {
    depth: usize,
    range: Range<Anchor>,
}

#[derive(Default)]
struct ChangeRegionSet(Vec<ChangedRegion>);

struct TextProvider<'a>(&'a Rope);

struct ByteChunks<'a>(rope::Chunks<'a>);

struct QueryCursorHandle(Option<QueryCursor>);

impl SyntaxMap {
    pub fn new() -> Self {
        Self::default()
    }

    pub fn set_language_registry(&mut self, registry: Arc<LanguageRegistry>) {
        self.language_registry = Some(registry);
    }

    pub fn snapshot(&self) -> SyntaxSnapshot {
        self.snapshot.clone()
    }

    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.language_registry.clone()
    }

    pub fn parsed_version(&self) -> clock::Global {
        self.parsed_version.clone()
    }

    pub fn interpolate(&mut self, text: &BufferSnapshot) {
        self.snapshot.interpolate(&self.interpolated_version, text);
        self.interpolated_version = text.version.clone();
    }

    #[cfg(test)]
    pub fn reparse(&mut self, language: Arc<Language>, text: &BufferSnapshot) {
        self.snapshot.reparse(
            &self.parsed_version,
            text,
            self.language_registry.clone(),
            language,
        );
        self.parsed_version = text.version.clone();
        self.interpolated_version = text.version.clone();
    }

    pub fn did_parse(&mut self, snapshot: SyntaxSnapshot, version: clock::Global) {
        self.interpolated_version = version.clone();
        self.parsed_version = version;
        self.snapshot = snapshot;
    }

    pub fn clear(&mut self) {
        self.snapshot = SyntaxSnapshot::default();
    }
}

impl SyntaxSnapshot {
    pub fn is_empty(&self) -> bool {
        self.layers.is_empty()
    }

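    /// Adjusts the existing syntax trees for edits made since `from_version`,
    /// applying the edits to each layer's tree without reparsing. Layers at a
    /// given depth that precede the first edit, or follow all of the edits,
    /// are carried over unchanged.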
    pub fn interpolate(&mut self, from_version: &clock::Global, text: &BufferSnapshot) {
        let edits = text
            .anchored_edits_since::<(usize, Point)>(&from_version)
            .collect::<Vec<_>>();
        if edits.is_empty() {
            return;
        }

        let mut layers = SumTree::new();
        let mut first_edit_ix_for_depth = 0;
        let mut prev_depth = 0;
        let mut cursor = self.layers.cursor::<SyntaxLayerSummary>();
        cursor.next(text);

        'outer: loop {
            let depth = cursor.end(text).max_depth;
            if depth > prev_depth {
                first_edit_ix_for_depth = 0;
                prev_depth = depth;
            }

            // Preserve any layers at this depth that precede the first edit.
            if let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) {
                let target = DepthAndMaxPosition(depth, edit_range.start);
                if target.cmp(&cursor.start(), text).is_gt() {
                    let slice = cursor.slice(&target, Bias::Left, text);
                    layers.push_tree(slice, text);
                }
            }
            // If this layer follows all of the edits, then preserve it and any
            // subsequent layers at this same depth.
            else if cursor.item().is_some() {
                let slice = cursor.slice(
                    &DepthAndRange(depth + 1, Anchor::MIN..Anchor::MAX),
                    Bias::Left,
                    text,
                );
                layers.push_tree(slice, text);
                continue;
            };

            let layer = if let Some(layer) = cursor.item() {
                layer
            } else {
                break;
            };
            let (start_byte, start_point) = layer.range.start.summary::<(usize, Point)>(text);

            // Ignore edits that end before the start of this layer, and don't consider them
            // for any subsequent layers at this same depth.
            loop {
                if let Some((_, edit_range)) = edits.get(first_edit_ix_for_depth) {
                    if edit_range.end.cmp(&layer.range.start, text).is_le() {
                        first_edit_ix_for_depth += 1;
                    } else {
                        break;
                    }
                } else {
                    continue 'outer;
                }
            }

            let mut layer = layer.clone();
            for (edit, edit_range) in &edits[first_edit_ix_for_depth..] {
                // Ignore any edits that follow this layer.
                if edit_range.start.cmp(&layer.range.end, text).is_ge() {
                    break;
                }

                // Apply any edits that intersect this layer to the layer's syntax tree.
                let tree_edit = if edit_range.start.cmp(&layer.range.start, text).is_ge() {
                    tree_sitter::InputEdit {
                        start_byte: edit.new.start.0 - start_byte,
                        old_end_byte: edit.new.start.0 - start_byte
                            + (edit.old.end.0 - edit.old.start.0),
                        new_end_byte: edit.new.end.0 - start_byte,
                        start_position: (edit.new.start.1 - start_point).to_ts_point(),
                        old_end_position: (edit.new.start.1 - start_point
                            + (edit.old.end.1 - edit.old.start.1))
                            .to_ts_point(),
                        new_end_position: (edit.new.end.1 - start_point).to_ts_point(),
                    }
                } else {
                    let node = layer.tree.root_node();
                    tree_sitter::InputEdit {
                        start_byte: 0,
                        old_end_byte: node.end_byte(),
                        new_end_byte: 0,
                        start_position: Default::default(),
                        old_end_position: node.end_position(),
                        new_end_position: Default::default(),
                    }
                };

                layer.tree.edit(&tree_edit);
            }

            debug_assert!(
                layer.tree.root_node().end_byte() <= text.len(),
                "tree's size {} is larger than the text's size {}",
                layer.tree.root_node().end_byte(),
                text.len(),
            );

            layers.push(layer, text);
            cursor.next(text);
        }

        layers.push_tree(cursor.suffix(&text), &text);
        drop(cursor);
        self.layers = layers;
    }

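    /// Reparses the layers affected by edits made since `from_version`,
    /// processing a queue of reparse steps in order of increasing depth.
    /// Layers that were not touched by an edit (and whose parent layer did not
    /// change) are copied over from the previous snapshot; the rest are
    /// reparsed, and any injections they contain are queued at the next depth.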
    pub fn reparse(
        &mut self,
        from_version: &clock::Global,
        text: &BufferSnapshot,
        registry: Option<Arc<LanguageRegistry>>,
        language: Arc<Language>,
    ) {
        let edits = text.edits_since::<usize>(from_version).collect::<Vec<_>>();
        let max_depth = self.layers.summary().max_depth;
        let mut cursor = self.layers.cursor::<SyntaxLayerSummary>();
        cursor.next(&text);
        let mut layers = SumTree::new();

        let mut changed_regions = ChangeRegionSet::default();
        let mut queue = BinaryHeap::new();
        queue.push(ReparseStep {
            depth: 0,
            language: language.clone(),
            ranges: Vec::new(),
            range: Anchor::MIN..Anchor::MAX,
        });

        loop {
            let step = queue.pop();
            let (depth, range) = if let Some(step) = &step {
                (step.depth, step.range.clone())
            } else {
                (max_depth + 1, Anchor::MAX..Anchor::MAX)
            };

            let target = DepthAndRange(depth, range.clone());
            let mut done = cursor.item().is_none();
            while !done && target.cmp(&cursor.end(text), &text).is_gt() {
                done = true;

                let bounded_target =
                    DepthAndRangeOrMaxPosition(target.clone(), changed_regions.start_position());
                if bounded_target.cmp(&cursor.start(), &text).is_gt() {
                    let slice = cursor.slice(&bounded_target, Bias::Left, text);
                    if !slice.is_empty() {
                        layers.push_tree(slice, &text);
                        if changed_regions.prune(cursor.end(text), text) {
                            done = false;
                        }
                    }
                }

                while target.cmp(&cursor.end(text), text).is_gt() {
                    let layer = if let Some(layer) = cursor.item() {
                        layer
                    } else {
                        break;
                    };

                    if changed_regions.intersects(&layer, text) {
                        changed_regions.insert(
                            ChangedRegion {
                                depth: layer.depth + 1,
                                range: layer.range.clone(),
                            },
                            text,
                        );
                    } else {
                        layers.push(layer.clone(), text);
                    }

                    cursor.next(text);
                    if changed_regions.prune(cursor.end(text), text) {
                        done = false;
                    }
                }
            }

            let (ranges, language) = if let Some(step) = step {
                (step.ranges, step.language)
            } else {
                break;
            };

            let start_point;
            let start_byte;
            let end_byte;
            if let Some((first, last)) = ranges.first().zip(ranges.last()) {
                start_point = first.start_point;
                start_byte = first.start_byte;
                end_byte = last.end_byte;
            } else {
                start_point = Point::zero().to_ts_point();
                start_byte = 0;
                end_byte = text.len();
            };

            let mut old_layer = cursor.item();
            if let Some(layer) = old_layer {
                if layer.range.to_offset(text) == (start_byte..end_byte) {
                    cursor.next(&text);
                } else {
                    old_layer = None;
                }
            }

            let grammar = if let Some(grammar) = language.grammar.as_deref() {
                grammar
            } else {
                continue;
            };

            let tree;
            let changed_ranges;
            if let Some(old_layer) = old_layer {
                tree = parse_text(
                    grammar,
                    text.as_rope(),
                    Some(old_layer.tree.clone()),
                    ranges,
                );
                changed_ranges = join_ranges(
                    edits
                        .iter()
                        .map(|e| e.new.clone())
                        .filter(|range| range.start < end_byte && range.end > start_byte),
                    old_layer
                        .tree
                        .changed_ranges(&tree)
                        .map(|r| start_byte + r.start_byte..start_byte + r.end_byte),
                );
            } else {
                tree = parse_text(grammar, text.as_rope(), None, ranges);
                changed_ranges = vec![start_byte..end_byte];
            }

            layers.push(
                SyntaxLayer {
                    depth,
                    range,
                    tree: tree.clone(),
                    language: language.clone(),
                },
                &text,
            );

            if let (Some((config, registry)), false) = (
                grammar.injection_config.as_ref().zip(registry.as_ref()),
                changed_ranges.is_empty(),
            ) {
                let depth = depth + 1;
                for range in &changed_ranges {
                    changed_regions.insert(
                        ChangedRegion {
                            depth,
                            range: text.anchor_before(range.start)..text.anchor_after(range.end),
                        },
                        text,
                    );
                }
                get_injections(
                    config,
                    text,
                    tree.root_node_with_offset(start_byte, start_point),
                    registry,
                    depth,
                    &changed_ranges,
                    &mut queue,
                );
            }
        }

        drop(cursor);
        self.layers = layers;
    }

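    /// Runs a query against a single standalone syntax tree that is not part
    /// of a snapshot, treating it as one layer at depth zero.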
    pub fn single_tree_captures<'a>(
        range: Range<usize>,
        text: &'a Rope,
        tree: &'a Tree,
        grammar: &'a Grammar,
        query: fn(&Grammar) -> Option<&Query>,
    ) -> SyntaxMapCaptures<'a> {
        SyntaxMapCaptures::new(
            range.clone(),
            text,
            [(grammar, 0, tree.root_node())].into_iter(),
            query,
        )
    }

    pub fn captures<'a>(
        &'a self,
        range: Range<usize>,
        buffer: &'a BufferSnapshot,
        query: fn(&Grammar) -> Option<&Query>,
    ) -> SyntaxMapCaptures {
        SyntaxMapCaptures::new(
            range.clone(),
            buffer.as_rope(),
            self.layers_for_range(range, buffer).into_iter(),
            query,
        )
    }

    pub fn matches<'a>(
        &'a self,
        range: Range<usize>,
        buffer: &'a BufferSnapshot,
        query: fn(&Grammar) -> Option<&Query>,
    ) -> SyntaxMapMatches {
        SyntaxMapMatches::new(
            range.clone(),
            buffer.as_rope(),
            self.layers_for_range(range, buffer).into_iter(),
            query,
        )
    }

    #[cfg(test)]
    pub fn layers(&self, buffer: &BufferSnapshot) -> Vec<(&Grammar, usize, Node)> {
        self.layers_for_range(0..buffer.len(), buffer)
    }

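    /// Collects the grammar, depth, and root node of every layer whose range
    /// intersects the given range. Layers are stored ordered by depth and then
    /// by position, so the result follows that same order.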
    pub fn layers_for_range<'a, T: ToOffset>(
        &self,
        range: Range<T>,
        buffer: &BufferSnapshot,
    ) -> Vec<(&Grammar, usize, Node)> {
        let start = buffer.anchor_before(range.start.to_offset(buffer));
        let end = buffer.anchor_after(range.end.to_offset(buffer));

        let mut cursor = self.layers.filter::<_, ()>(|summary| {
            if summary.max_depth > summary.min_depth {
                true
            } else {
                let is_before_start = summary.range.end.cmp(&start, buffer).is_lt();
                let is_after_end = summary.range.start.cmp(&end, buffer).is_gt();
                !is_before_start && !is_after_end
            }
        });

        let mut result = Vec::new();
        cursor.next(buffer);
        while let Some(layer) = cursor.item() {
            if let Some(grammar) = &layer.language.grammar {
                result.push((
                    grammar.as_ref(),
                    layer.depth,
                    layer.tree.root_node_with_offset(
                        layer.range.start.to_offset(buffer),
                        layer.range.start.to_point(buffer).to_ts_point(),
                    ),
                ));
            }
            cursor.next(buffer)
        }

        result
    }
}

impl<'a> SyntaxMapCaptures<'a> {
    fn new(
        range: Range<usize>,
        text: &'a Rope,
        layers: impl Iterator<Item = (&'a Grammar, usize, Node<'a>)>,
        query: fn(&Grammar) -> Option<&Query>,
    ) -> Self {
        let mut result = Self {
            layers: Vec::new(),
            grammars: Vec::new(),
            active_layer_count: 0,
        };
        for (grammar, depth, node) in layers {
            let query = if let Some(query) = query(grammar) {
                query
            } else {
                continue;
            };

            let mut query_cursor = QueryCursorHandle::new();

            // TODO - add a Tree-sitter API to remove the need for this.
            let cursor = unsafe {
                std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
            };

            cursor.set_byte_range(range.clone());
            let captures = cursor.captures(query, node, TextProvider(text));
            let grammar_index = result
                .grammars
                .iter()
                .position(|g| g.id == grammar.id())
                .unwrap_or_else(|| {
                    result.grammars.push(grammar);
                    result.grammars.len() - 1
                });
            let mut layer = SyntaxMapCapturesLayer {
                depth,
                grammar_index,
                next_capture: None,
                captures,
                _query_cursor: query_cursor,
            };

            layer.advance();
            if layer.next_capture.is_some() {
                let key = layer.sort_key();
                let ix = match result.layers[..result.active_layer_count]
                    .binary_search_by_key(&key, |layer| layer.sort_key())
                {
                    Ok(ix) | Err(ix) => ix,
                };
                result.layers.insert(ix, layer);
                result.active_layer_count += 1;
            } else {
                result.layers.push(layer);
            }
        }

        result
    }

    pub fn grammars(&self) -> &[&'a Grammar] {
        &self.grammars
    }

    pub fn peek(&self) -> Option<SyntaxMapCapture<'a>> {
        let layer = self.layers[..self.active_layer_count].first()?;
        let capture = layer.next_capture?;
        Some(SyntaxMapCapture {
            depth: layer.depth,
            grammar_index: layer.grammar_index,
            index: capture.index,
            node: capture.node,
        })
    }

    pub fn advance(&mut self) -> bool {
        let layer = if let Some(layer) = self.layers[..self.active_layer_count].first_mut() {
            layer
        } else {
            return false;
        };

        layer.advance();
        if layer.next_capture.is_some() {
            let key = layer.sort_key();
            let i = 1 + self.layers[1..self.active_layer_count]
                .iter()
                .position(|later_layer| key < later_layer.sort_key())
                .unwrap_or(self.active_layer_count - 1);
            self.layers[0..i].rotate_left(1);
        } else {
            self.layers[0..self.active_layer_count].rotate_left(1);
            self.active_layer_count -= 1;
        }

        true
    }

    pub fn set_byte_range(&mut self, range: Range<usize>) {
        for layer in &mut self.layers {
            layer.captures.set_byte_range(range.clone());
            if let Some(capture) = &layer.next_capture {
                if capture.node.end_byte() > range.start {
                    continue;
                }
            }
            layer.advance();
        }
        self.layers.sort_unstable_by_key(|layer| layer.sort_key());
        self.active_layer_count = self
            .layers
            .iter()
            .position(|layer| layer.next_capture.is_none())
            .unwrap_or(self.layers.len());
    }
}

impl<'a> SyntaxMapMatches<'a> {
    fn new(
        range: Range<usize>,
        text: &'a Rope,
        layers: impl Iterator<Item = (&'a Grammar, usize, Node<'a>)>,
        query: fn(&Grammar) -> Option<&Query>,
    ) -> Self {
        let mut result = Self::default();
        for (grammar, depth, node) in layers {
            let query = if let Some(query) = query(grammar) {
                query
            } else {
                continue;
            };

            let mut query_cursor = QueryCursorHandle::new();

            // TODO - add a Tree-sitter API to remove the need for this.
            let cursor = unsafe {
                std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
            };

            cursor.set_byte_range(range.clone());
            let matches = cursor.matches(query, node, TextProvider(text));
            let grammar_index = result
                .grammars
                .iter()
                .position(|g| g.id == grammar.id())
                .unwrap_or_else(|| {
                    result.grammars.push(grammar);
                    result.grammars.len() - 1
                });
            let mut layer = SyntaxMapMatchesLayer {
                depth,
                grammar_index,
                matches,
                next_pattern_index: 0,
                next_captures: Vec::new(),
                has_next: false,
                _query_cursor: query_cursor,
            };

            layer.advance();
            if layer.has_next {
                let key = layer.sort_key();
                let ix = match result.layers[..result.active_layer_count]
                    .binary_search_by_key(&key, |layer| layer.sort_key())
                {
                    Ok(ix) | Err(ix) => ix,
                };
                result.layers.insert(ix, layer);
                result.active_layer_count += 1;
            } else {
                result.layers.push(layer);
            }
        }
        result
    }

    pub fn grammars(&self) -> &[&'a Grammar] {
        &self.grammars
    }

    pub fn peek(&self) -> Option<SyntaxMapMatch> {
        let layer = self.layers.first()?;
        if !layer.has_next {
            return None;
        }
        Some(SyntaxMapMatch {
            depth: layer.depth,
            grammar_index: layer.grammar_index,
            pattern_index: layer.next_pattern_index,
            captures: &layer.next_captures,
        })
    }

    pub fn advance(&mut self) -> bool {
        let layer = if let Some(layer) = self.layers.first_mut() {
            layer
        } else {
            return false;
        };

        layer.advance();
        if layer.has_next {
            let key = layer.sort_key();
            let i = 1 + self.layers[1..self.active_layer_count]
                .iter()
                .position(|later_layer| key < later_layer.sort_key())
                .unwrap_or(self.active_layer_count - 1);
            self.layers[0..i].rotate_left(1);
        } else {
            self.layers[0..self.active_layer_count].rotate_left(1);
            self.active_layer_count -= 1;
        }

        true
    }
}

impl<'a> SyntaxMapCapturesLayer<'a> {
    fn advance(&mut self) {
        self.next_capture = self.captures.next().map(|(mat, ix)| mat.captures[ix]);
    }

    fn sort_key(&self) -> (usize, Reverse<usize>, usize) {
        if let Some(capture) = &self.next_capture {
            let range = capture.node.byte_range();
            (range.start, Reverse(range.end), self.depth)
        } else {
            (usize::MAX, Reverse(0), usize::MAX)
        }
    }
}

impl<'a> SyntaxMapMatchesLayer<'a> {
    fn advance(&mut self) {
        if let Some(mat) = self.matches.next() {
            self.next_captures.clear();
            self.next_captures.extend_from_slice(&mat.captures);
            self.next_pattern_index = mat.pattern_index;
            self.has_next = true;
        } else {
            self.has_next = false;
        }
    }

    fn sort_key(&self) -> (usize, Reverse<usize>, usize) {
        if self.has_next {
            let captures = &self.next_captures;
            if let Some((first, last)) = captures.first().zip(captures.last()) {
                return (
                    first.node.start_byte(),
                    Reverse(last.node.end_byte()),
                    self.depth,
                );
            }
        }
        (usize::MAX, Reverse(0), usize::MAX)
    }
}

impl<'a> Iterator for SyntaxMapCaptures<'a> {
    type Item = SyntaxMapCapture<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let result = self.peek();
        self.advance();
        result
    }
}

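/// Merges two sorted sequences of ranges into a single sorted list,
/// coalescing any ranges that overlap or touch.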
fn join_ranges(
    a: impl Iterator<Item = Range<usize>>,
    b: impl Iterator<Item = Range<usize>>,
) -> Vec<Range<usize>> {
    let mut result = Vec::<Range<usize>>::new();
    let mut a = a.peekable();
    let mut b = b.peekable();
    loop {
        let range = match (a.peek(), b.peek()) {
            (Some(range_a), Some(range_b)) => {
                if range_a.start < range_b.start {
                    a.next().unwrap()
                } else {
                    b.next().unwrap()
                }
            }
            (None, Some(_)) => b.next().unwrap(),
            (Some(_), None) => a.next().unwrap(),
            (None, None) => break,
        };

        if let Some(last) = result.last_mut() {
            if range.start <= last.end {
                last.end = last.end.max(range.end);
                continue;
            }
        }
        result.push(range);
    }
    result
}

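/// Parses the given included ranges of `text` with the given grammar, reusing
/// `old_tree` for incremental parsing when available. The ranges are shifted
/// so that the first included range starts at offset zero, since injected
/// layers store their trees relative to the layer's start.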
fn parse_text(
    grammar: &Grammar,
    text: &Rope,
    old_tree: Option<Tree>,
    mut ranges: Vec<tree_sitter::Range>,
) -> Tree {
    let (start_byte, start_point) = ranges
        .first()
        .map(|range| (range.start_byte, Point::from_ts_point(range.start_point)))
        .unwrap_or_default();

    for range in &mut ranges {
        range.start_byte -= start_byte;
        range.end_byte -= start_byte;
        range.start_point = (Point::from_ts_point(range.start_point) - start_point).to_ts_point();
        range.end_point = (Point::from_ts_point(range.end_point) - start_point).to_ts_point();
    }

    PARSER.with(|parser| {
        let mut parser = parser.borrow_mut();
        let mut chunks = text.chunks_in_range(start_byte..text.len());
        parser
            .set_included_ranges(&ranges)
            .expect("overlapping ranges");
        parser
            .set_language(grammar.ts_language)
            .expect("incompatible grammar");
        parser
            .parse_with(
                &mut move |offset, _| {
                    chunks.seek(start_byte + offset);
                    chunks.next().unwrap_or("").as_bytes()
                },
                old_tree.as_ref(),
            )
            .expect("invalid language")
    })
}

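/// Runs the grammar's injection query over the given changed ranges and pushes
/// a `ReparseStep` onto the queue for every injected language that can be
/// resolved via the registry. Returns true if any injections were queued.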
fn get_injections(
    config: &InjectionConfig,
    text: &BufferSnapshot,
    node: Node,
    language_registry: &LanguageRegistry,
    depth: usize,
    query_ranges: &[Range<usize>],
    queue: &mut BinaryHeap<ReparseStep>,
) -> bool {
    let mut result = false;
    let mut query_cursor = QueryCursorHandle::new();
    let mut prev_match = None;
    for query_range in query_ranges {
        query_cursor.set_byte_range(query_range.start.saturating_sub(1)..query_range.end);
        for mat in query_cursor.matches(&config.query, node, TextProvider(text.as_rope())) {
            let content_ranges = mat
                .nodes_for_capture_index(config.content_capture_ix)
                .map(|node| node.range())
                .collect::<Vec<_>>();
            if content_ranges.is_empty() {
                continue;
            }

            // Avoid duplicate matches if two changed ranges intersect the same injection.
            let content_range =
                content_ranges.first().unwrap().start_byte..content_ranges.last().unwrap().end_byte;
            if let Some((last_pattern_ix, last_range)) = &prev_match {
                if mat.pattern_index == *last_pattern_ix && content_range == *last_range {
                    continue;
                }
            }
            prev_match = Some((mat.pattern_index, content_range.clone()));

            let language_name = config.languages_by_pattern_ix[mat.pattern_index]
                .as_ref()
                .map(|s| Cow::Borrowed(s.as_ref()))
                .or_else(|| {
                    let ix = config.language_capture_ix?;
                    let node = mat.nodes_for_capture_index(ix).next()?;
                    Some(Cow::Owned(text.text_for_range(node.byte_range()).collect()))
                });

            if let Some(language_name) = language_name {
                if let Some(language) = language_registry.get_language(language_name.as_ref()) {
                    result = true;
                    let range = text.anchor_before(content_range.start)
                        ..text.anchor_after(content_range.end);
                    queue.push(ReparseStep {
                        depth,
                        language,
                        ranges: content_ranges,
                        range,
                    })
                }
            }
        }
    }
    result
}

impl std::ops::Deref for SyntaxMap {
    type Target = SyntaxSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}

impl PartialEq for ReparseStep {
    fn eq(&self, _: &Self) -> bool {
        false
    }
}

impl Eq for ReparseStep {}

impl PartialOrd for ReparseStep {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

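// `ReparseStep`s are stored in a `BinaryHeap` (a max-heap), so the comparison
// is reversed on depth and start position: the step with the smallest depth,
// and then the earliest start, is popped first.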
impl Ord for ReparseStep {
    fn cmp(&self, other: &Self) -> Ordering {
        let range_a = self.range();
        let range_b = other.range();
        Ord::cmp(&other.depth, &self.depth)
            .then_with(|| Ord::cmp(&range_b.start, &range_a.start))
            .then_with(|| Ord::cmp(&range_a.end, &range_b.end))
    }
}

impl ReparseStep {
    fn range(&self) -> Range<usize> {
        let start = self.ranges.first().map_or(0, |r| r.start_byte);
        let end = self.ranges.last().map_or(0, |r| r.end_byte);
        start..end
    }
}

impl ChangedRegion {
    fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering {
        let range_a = &self.range;
        let range_b = &other.range;
        Ord::cmp(&self.depth, &other.depth)
            .then_with(|| range_a.start.cmp(&range_b.start, buffer))
            .then_with(|| range_b.end.cmp(&range_a.end, buffer))
    }
}

impl ChangeRegionSet {
    fn start_position(&self) -> DepthAndMaxPosition {
        self.0
            .first()
            .map_or(DepthAndMaxPosition(usize::MAX, Anchor::MAX), |region| {
                DepthAndMaxPosition(region.depth, region.range.start)
            })
    }

    fn intersects(&self, layer: &SyntaxLayer, text: &BufferSnapshot) -> bool {
        for region in &self.0 {
            if region.depth < layer.depth {
                continue;
            }
            if region.depth > layer.depth {
                break;
            }
            if region.range.end.cmp(&layer.range.start, text).is_le() {
                continue;
            }
            if region.range.start.cmp(&layer.range.end, text).is_ge() {
                break;
            }
            return true;
        }
        false
    }

    fn insert(&mut self, region: ChangedRegion, text: &BufferSnapshot) {
        if let Err(ix) = self.0.binary_search_by(|probe| probe.cmp(&region, text)) {
            self.0.insert(ix, region);
        }
    }

    fn prune(&mut self, summary: SyntaxLayerSummary, text: &BufferSnapshot) -> bool {
        let prev_len = self.0.len();
        self.0.retain(|region| {
            region.depth > summary.max_depth
                || (region.depth == summary.max_depth
                    && region
                        .range
                        .end
                        .cmp(&summary.last_layer_range.start, text)
                        .is_gt())
        });
        self.0.len() < prev_len
    }
}

impl Default for SyntaxLayerSummary {
    fn default() -> Self {
        Self {
            max_depth: 0,
            min_depth: 0,
            range: Anchor::MAX..Anchor::MIN,
            last_layer_range: Anchor::MIN..Anchor::MAX,
        }
    }
}

impl sum_tree::Summary for SyntaxLayerSummary {
    type Context = BufferSnapshot;

    fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
        if other.max_depth > self.max_depth {
            self.max_depth = other.max_depth;
            self.range = other.range.clone();
        } else {
            if other.range.start.cmp(&self.range.start, buffer).is_lt() {
                self.range.start = other.range.start;
            }
            if other.range.end.cmp(&self.range.end, buffer).is_gt() {
                self.range.end = other.range.end;
            }
        }
        self.last_layer_range = other.last_layer_range.clone();
    }
}

impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for DepthAndRange {
    fn cmp(&self, cursor_location: &SyntaxLayerSummary, buffer: &BufferSnapshot) -> Ordering {
        Ord::cmp(&self.0, &cursor_location.max_depth)
            .then_with(|| {
                self.1
                    .start
                    .cmp(&cursor_location.last_layer_range.start, buffer)
            })
            .then_with(|| {
                cursor_location
                    .last_layer_range
                    .end
                    .cmp(&self.1.end, buffer)
            })
    }
}

impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for DepthAndMaxPosition {
    fn cmp(&self, cursor_location: &SyntaxLayerSummary, text: &BufferSnapshot) -> Ordering {
        Ord::cmp(&self.0, &cursor_location.max_depth)
            .then_with(|| self.1.cmp(&cursor_location.range.end, text))
    }
}

impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for DepthAndRangeOrMaxPosition {
    fn cmp(&self, cursor_location: &SyntaxLayerSummary, buffer: &BufferSnapshot) -> Ordering {
        if self.1.cmp(cursor_location, buffer).is_le() {
            Ordering::Less
        } else {
            self.0.cmp(cursor_location, buffer)
        }
    }
}

impl sum_tree::Item for SyntaxLayer {
    type Summary = SyntaxLayerSummary;

    fn summary(&self) -> Self::Summary {
        SyntaxLayerSummary {
            min_depth: self.depth,
            max_depth: self.depth,
            range: self.range.clone(),
            last_layer_range: self.range.clone(),
        }
    }
}

impl std::fmt::Debug for SyntaxLayer {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("SyntaxLayer")
            .field("depth", &self.depth)
            .field("range", &self.range)
            .field("tree", &self.tree)
            .finish()
    }
}

impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        ByteChunks(self.0.chunks_in_range(node.byte_range()))
    }
}

impl<'a> Iterator for ByteChunks<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(str::as_bytes)
    }
}

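// A `QueryCursor` checked out from the global `QUERY_CURSORS` pool. The cursor
// is reset and returned to the pool when the handle is dropped, so cursors can
// be reused across queries instead of being reallocated.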
impl QueryCursorHandle {
    pub(crate) fn new() -> Self {
        let mut cursor = QUERY_CURSORS.lock().pop().unwrap_or_else(QueryCursor::new);
        cursor.set_match_limit(64);
        QueryCursorHandle(Some(cursor))
    }
}

impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        self.0.as_ref().unwrap()
    }
}

impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.0.as_mut().unwrap()
    }
}

impl Drop for QueryCursorHandle {
    fn drop(&mut self) {
        let mut cursor = self.0.take().unwrap();
        cursor.set_byte_range(0..usize::MAX);
        cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
        QUERY_CURSORS.lock().push(cursor)
    }
}

pub(crate) trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}

impl ToTreeSitterPoint for Point {
    fn to_ts_point(self) -> tree_sitter::Point {
        tree_sitter::Point::new(self.row as usize, self.column as usize)
    }

    fn from_ts_point(point: tree_sitter::Point) -> Self {
        Point::new(point.row as u32, point.column as u32)
    }
}

1209#[cfg(test)]
1210mod tests {
1211 use super::*;
1212 use crate::LanguageConfig;
1213 use rand::rngs::StdRng;
1214 use std::env;
1215 use text::{Buffer, Point};
1216 use unindent::Unindent as _;
1217 use util::test::marked_text_ranges;
1218
1219 #[gpui::test]
1220 fn test_syntax_map_layers_for_range() {
1221 let registry = Arc::new(LanguageRegistry::test());
1222 let language = Arc::new(rust_lang());
1223 registry.add(language.clone());
1224
1225 let mut buffer = Buffer::new(
1226 0,
1227 0,
1228 r#"
1229 fn a() {
1230 assert_eq!(
1231 b(vec![C {}]),
1232 vec![d.e],
1233 );
1234 println!("{}", f(|_| true));
1235 }
1236 "#
1237 .unindent(),
1238 );
1239
1240 let mut syntax_map = SyntaxMap::new();
1241 syntax_map.set_language_registry(registry.clone());
1242 syntax_map.reparse(language.clone(), &buffer);
1243
1244 assert_layers_for_range(
1245 &syntax_map,
1246 &buffer,
1247 Point::new(2, 0)..Point::new(2, 0),
1248 &[
1249 "...(function_item ... (block (expression_statement (macro_invocation...",
1250 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1251 ],
1252 );
1253 assert_layers_for_range(
1254 &syntax_map,
1255 &buffer,
1256 Point::new(2, 14)..Point::new(2, 16),
1257 &[
1258 "...(function_item ...",
1259 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1260 "...(array_expression (struct_expression ...",
1261 ],
1262 );
1263 assert_layers_for_range(
1264 &syntax_map,
1265 &buffer,
1266 Point::new(3, 14)..Point::new(3, 16),
1267 &[
1268 "...(function_item ...",
1269 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1270 "...(array_expression (field_expression ...",
1271 ],
1272 );
1273 assert_layers_for_range(
1274 &syntax_map,
1275 &buffer,
1276 Point::new(5, 12)..Point::new(5, 16),
1277 &[
1278 "...(function_item ...",
1279 "...(call_expression ... (arguments (closure_expression ...",
1280 ],
1281 );
1282
1283 // Replace a vec! macro invocation with a plain slice, removing a syntactic layer.
1284 let macro_name_range = range_for_text(&buffer, "vec!");
1285 buffer.edit([(macro_name_range, "&")]);
1286 syntax_map.interpolate(&buffer);
1287 syntax_map.reparse(language.clone(), &buffer);
1288
1289 assert_layers_for_range(
1290 &syntax_map,
1291 &buffer,
1292 Point::new(2, 14)..Point::new(2, 16),
1293 &[
1294 "...(function_item ...",
1295 "...(tuple_expression (call_expression ... arguments: (arguments (reference_expression value: (array_expression...",
1296 ],
1297 );
1298
1299 // Put the vec! macro back, adding back the syntactic layer.
1300 buffer.undo();
1301 syntax_map.interpolate(&buffer);
1302 syntax_map.reparse(language.clone(), &buffer);
1303
1304 assert_layers_for_range(
1305 &syntax_map,
1306 &buffer,
1307 Point::new(2, 14)..Point::new(2, 16),
1308 &[
1309 "...(function_item ...",
1310 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1311 "...(array_expression (struct_expression ...",
1312 ],
1313 );
1314 }
1315
1316 #[gpui::test]
1317 fn test_typing_multiple_new_injections() {
1318 let (buffer, syntax_map) = test_edit_sequence(&[
1319 "fn a() { dbg }",
1320 "fn a() { dbg«!» }",
1321 "fn a() { dbg!«()» }",
1322 "fn a() { dbg!(«b») }",
1323 "fn a() { dbg!(b«.») }",
1324 "fn a() { dbg!(b.«c») }",
1325 "fn a() { dbg!(b.c«()») }",
1326 "fn a() { dbg!(b.c(«vec»)) }",
1327 "fn a() { dbg!(b.c(vec«!»)) }",
1328 "fn a() { dbg!(b.c(vec!«[]»)) }",
1329 "fn a() { dbg!(b.c(vec![«d»])) }",
1330 "fn a() { dbg!(b.c(vec![d«.»])) }",
1331 "fn a() { dbg!(b.c(vec![d.«e»])) }",
1332 ]);
1333
1334 assert_capture_ranges(
1335 &syntax_map,
1336 &buffer,
1337 &["field"],
1338 "fn a() { dbg!(b.«c»(vec![d.«e»])) }",
1339 );
1340 }
1341
1342 #[gpui::test]
1343 fn test_pasting_new_injection_line_between_others() {
1344 let (buffer, syntax_map) = test_edit_sequence(&[
1345 "
1346 fn a() {
1347 b!(B {});
1348 c!(C {});
1349 d!(D {});
1350 e!(E {});
1351 f!(F {});
1352 g!(G {});
1353 }
1354 ",
1355 "
1356 fn a() {
1357 b!(B {});
1358 c!(C {});
1359 d!(D {});
1360 « h!(H {});
1361 » e!(E {});
1362 f!(F {});
1363 g!(G {});
1364 }
1365 ",
1366 ]);
1367
1368 assert_capture_ranges(
1369 &syntax_map,
1370 &buffer,
1371 &["struct"],
1372 "
1373 fn a() {
1374 b!(«B {}»);
1375 c!(«C {}»);
1376 d!(«D {}»);
1377 h!(«H {}»);
1378 e!(«E {}»);
1379 f!(«F {}»);
1380 g!(«G {}»);
1381 }
1382 ",
1383 );
1384 }
1385
1386 #[gpui::test]
1387 fn test_joining_injections_with_child_injections() {
1388 let (buffer, syntax_map) = test_edit_sequence(&[
1389 "
1390 fn a() {
1391 b!(
1392 c![one.two.three],
1393 d![four.five.six],
1394 );
1395 e!(
1396 f![seven.eight],
1397 );
1398 }
1399 ",
1400 "
1401 fn a() {
1402 b!(
1403 c![one.two.three],
1404 d![four.five.six],
1405 ˇ f![seven.eight],
1406 );
1407 }
1408 ",
1409 ]);
1410
1411 assert_capture_ranges(
1412 &syntax_map,
1413 &buffer,
1414 &["field"],
1415 "
1416 fn a() {
1417 b!(
1418 c![one.«two».«three»],
1419 d![four.«five».«six»],
1420 f![seven.«eight»],
1421 );
1422 }
1423 ",
1424 );
1425 }
1426
1427 #[gpui::test]
1428 fn test_editing_edges_of_injection() {
1429 test_edit_sequence(&[
1430 "
1431 fn a() {
1432 b!(c!())
1433 }
1434 ",
1435 "
1436 fn a() {
1437 «d»!(c!())
1438 }
1439 ",
1440 "
1441 fn a() {
1442 «e»d!(c!())
1443 }
1444 ",
1445 "
1446 fn a() {
1447 ed!«[»c!()«]»
1448 }
1449 ",
1450 ]);
1451 }
1452
1453 #[gpui::test]
1454 fn test_edits_preceding_and_intersecting_injection() {
1455 test_edit_sequence(&[
1456 //
1457 "const aaaaaaaaaaaa: B = c!(d(e.f));",
1458 "const aˇa: B = c!(d(eˇ));",
1459 ]);
1460 }
1461
1462 #[gpui::test]
1463 fn test_non_local_changes_create_injections() {
1464 test_edit_sequence(&[
1465 "
1466 // a! {
1467 static B: C = d;
1468 // }
1469 ",
1470 "
1471 ˇa! {
1472 static B: C = d;
1473 ˇ}
1474 ",
1475 ]);
1476 }
1477
1478 #[gpui::test]
1479 fn test_creating_many_injections_in_one_edit() {
1480 test_edit_sequence(&[
1481 "
1482 fn a() {
1483 one(Two::three(3));
1484 four(Five::six(6));
1485 seven(Eight::nine(9));
1486 }
1487 ",
1488 "
1489 fn a() {
1490 one«!»(Two::three(3));
1491 four«!»(Five::six(6));
1492 seven«!»(Eight::nine(9));
1493 }
1494 ",
1495 "
1496 fn a() {
1497 one!(Two::three«!»(3));
1498 four!(Five::six«!»(6));
1499 seven!(Eight::nine«!»(9));
1500 }
1501 ",
1502 ]);
1503 }
1504
1505 #[gpui::test]
1506 fn test_editing_across_injection_boundary() {
1507 test_edit_sequence(&[
1508 "
1509 fn one() {
1510 two();
1511 three!(
1512 three.four,
1513 five.six,
1514 );
1515 }
1516 ",
1517 "
1518 fn one() {
1519 two();
1520 th«irty_five![»
1521 three.four,
1522 five.six,
1523 « seven.eight,
1524 ];»
1525 }
1526 ",
1527 ]);
1528 }
1529
1530 #[gpui::test]
1531 fn test_removing_injection_by_replacing_across_boundary() {
1532 test_edit_sequence(&[
1533 "
1534 fn one() {
1535 two!(
1536 three.four,
1537 );
1538 }
1539 ",
1540 "
1541 fn one() {
1542 t«en
1543 .eleven(
1544 twelve,
1545 »
1546 three.four,
1547 );
1548 }
1549 ",
1550 ]);
1551 }
1552
1553 #[gpui::test(iterations = 100)]
1554 fn test_random_syntax_map_edits(mut rng: StdRng) {
1555 let operations = env::var("OPERATIONS")
1556 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
1557 .unwrap_or(10);
1558
1559 let text = r#"
1560 fn test_something() {
1561 let vec = vec![5, 1, 3, 8];
1562 assert_eq!(
1563 vec
1564 .into_iter()
1565 .map(|i| i * 2)
1566 .collect::<Vec<usize>>(),
1567 vec![
1568 5 * 2, 1 * 2, 3 * 2, 8 * 2
1569 ],
1570 );
1571 }
1572 "#
1573 .unindent()
1574 .repeat(2);
1575
1576 let registry = Arc::new(LanguageRegistry::test());
1577 let language = Arc::new(rust_lang());
1578 registry.add(language.clone());
1579 let mut buffer = Buffer::new(0, 0, text);
1580
1581 let mut syntax_map = SyntaxMap::new();
1582 syntax_map.set_language_registry(registry.clone());
1583 syntax_map.reparse(language.clone(), &buffer);
1584
1585 let mut reference_syntax_map = SyntaxMap::new();
1586 reference_syntax_map.set_language_registry(registry.clone());
1587
1588 log::info!("initial text:\n{}", buffer.text());
1589
1590 for _ in 0..operations {
1591 let prev_buffer = buffer.snapshot();
1592 let prev_syntax_map = syntax_map.snapshot();
1593
1594 buffer.randomly_edit(&mut rng, 3);
1595 log::info!("text:\n{}", buffer.text());
1596
1597 syntax_map.interpolate(&buffer);
1598 check_interpolation(&prev_syntax_map, &syntax_map, &prev_buffer, &buffer);
1599
1600 syntax_map.reparse(language.clone(), &buffer);
1601
1602 reference_syntax_map.clear();
1603 reference_syntax_map.reparse(language.clone(), &buffer);
1604 }
1605
1606 for i in 0..operations {
1607 let i = operations - i - 1;
1608 buffer.undo();
1609 log::info!("undoing operation {}", i);
1610 log::info!("text:\n{}", buffer.text());
1611
1612 syntax_map.interpolate(&buffer);
1613 syntax_map.reparse(language.clone(), &buffer);
1614
1615 reference_syntax_map.clear();
1616 reference_syntax_map.reparse(language.clone(), &buffer);
1617 assert_eq!(
1618 syntax_map.layers(&buffer).len(),
1619 reference_syntax_map.layers(&buffer).len(),
1620 "wrong number of layers after undoing edit {i}"
1621 );
1622 }
1623
1624 let layers = syntax_map.layers(&buffer);
1625 let reference_layers = reference_syntax_map.layers(&buffer);
1626 for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter())
1627 {
1628 assert_eq!(edited_layer.2.to_sexp(), reference_layer.2.to_sexp());
1629 assert_eq!(edited_layer.2.range(), reference_layer.2.range());
1630 }
1631 }
1632
1633 fn check_interpolation(
1634 old_syntax_map: &SyntaxSnapshot,
1635 new_syntax_map: &SyntaxSnapshot,
1636 old_buffer: &BufferSnapshot,
1637 new_buffer: &BufferSnapshot,
1638 ) {
1639 let edits = new_buffer
1640 .edits_since::<usize>(&old_buffer.version())
1641 .collect::<Vec<_>>();
1642
1643 for (old_layer, new_layer) in old_syntax_map
1644 .layers
1645 .iter()
1646 .zip(new_syntax_map.layers.iter())
1647 {
1648 assert_eq!(old_layer.range, new_layer.range);
1649 let old_start_byte = old_layer.range.start.to_offset(old_buffer);
1650 let new_start_byte = new_layer.range.start.to_offset(new_buffer);
1651 let old_start_point = old_layer.range.start.to_point(old_buffer).to_ts_point();
1652 let new_start_point = new_layer.range.start.to_point(new_buffer).to_ts_point();
1653 let old_node = old_layer
1654 .tree
1655 .root_node_with_offset(old_start_byte, old_start_point);
1656 let new_node = new_layer
1657 .tree
1658 .root_node_with_offset(new_start_byte, new_start_point);
1659 check_node_edits(
1660 old_layer.depth,
1661 &old_layer.range,
1662 old_node,
1663 new_node,
1664 old_buffer,
1665 new_buffer,
1666 &edits,
1667 );
1668 }
1669
1670 fn check_node_edits(
1671 depth: usize,
1672 range: &Range<Anchor>,
1673 old_node: Node,
1674 new_node: Node,
1675 old_buffer: &BufferSnapshot,
1676 new_buffer: &BufferSnapshot,
1677 edits: &[text::Edit<usize>],
1678 ) {
1679 assert_eq!(old_node.kind(), new_node.kind());
1680
1681 let old_range = old_node.byte_range();
1682 let new_range = new_node.byte_range();
1683
1684 let is_edited = edits
1685 .iter()
1686 .any(|edit| edit.new.start < new_range.end && edit.new.end > new_range.start);
1687 if is_edited {
1688 assert!(
1689 new_node.has_changes(),
1690 concat!(
1691 "failed to mark node as edited.\n",
1692 "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
1693 "node kind: {}, old node range: {:?}, new node range: {:?}",
1694 ),
1695 depth,
1696 range.to_offset(old_buffer),
1697 range.to_offset(new_buffer),
1698 new_node.kind(),
1699 old_range,
1700 new_range,
1701 );
1702 }
1703
1704 if !new_node.has_changes() {
1705 assert_eq!(
1706 old_buffer
1707 .text_for_range(old_range.clone())
1708 .collect::<String>(),
1709 new_buffer
1710 .text_for_range(new_range.clone())
1711 .collect::<String>(),
1712 concat!(
1713 "mismatched text for node\n",
1714 "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
1715 "node kind: {}, old node range:{:?}, new node range:{:?}",
1716 ),
1717 depth,
1718 range.to_offset(old_buffer),
1719 range.to_offset(new_buffer),
1720 new_node.kind(),
1721 old_range,
1722 new_range,
1723 );
1724 }
1725
1726 for i in 0..new_node.child_count() {
1727 check_node_edits(
1728 depth,
1729 range,
1730 old_node.child(i).unwrap(),
1731 new_node.child(i).unwrap(),
1732 old_buffer,
1733 new_buffer,
1734 edits,
1735 )
1736 }
1737 }
1738 }
1739
1740 fn test_edit_sequence(steps: &[&str]) -> (Buffer, SyntaxMap) {
1741 let registry = Arc::new(LanguageRegistry::test());
1742 let language = Arc::new(rust_lang());
1743 registry.add(language.clone());
1744 let mut buffer = Buffer::new(0, 0, Default::default());
1745
1746 let mut mutated_syntax_map = SyntaxMap::new();
1747 mutated_syntax_map.set_language_registry(registry.clone());
1748 mutated_syntax_map.reparse(language.clone(), &buffer);
1749
1750 for (i, marked_string) in steps.into_iter().enumerate() {
1751 edit_buffer(&mut buffer, &marked_string.unindent());
1752
1753 // Reparse the syntax map
1754 mutated_syntax_map.interpolate(&buffer);
1755 mutated_syntax_map.reparse(language.clone(), &buffer);
1756
1757 // Create a second syntax map from scratch
1758 let mut reference_syntax_map = SyntaxMap::new();
1759 reference_syntax_map.set_language_registry(registry.clone());
1760 reference_syntax_map.reparse(language.clone(), &buffer);
1761
1762 // Compare the mutated syntax map to the new syntax map
1763 let mutated_layers = mutated_syntax_map.layers(&buffer);
1764 let reference_layers = reference_syntax_map.layers(&buffer);
1765 assert_eq!(
1766 mutated_layers.len(),
1767 reference_layers.len(),
1768 "wrong number of layers at step {i}"
1769 );
1770 for (edited_layer, reference_layer) in
1771 mutated_layers.into_iter().zip(reference_layers.into_iter())
1772 {
1773 assert_eq!(
1774 edited_layer.2.to_sexp(),
1775 reference_layer.2.to_sexp(),
1776 "different layer at step {i}"
1777 );
1778 assert_eq!(
1779 edited_layer.2.range(),
1780 reference_layer.2.range(),
1781 "different layer at step {i}"
1782 );
1783 }
1784 }
1785
1786 (buffer, mutated_syntax_map)
1787 }
1788
1789 fn rust_lang() -> Language {
1790 Language::new(
1791 LanguageConfig {
1792 name: "Rust".into(),
1793 path_suffixes: vec!["rs".to_string()],
1794 ..Default::default()
1795 },
1796 Some(tree_sitter_rust::language()),
1797 )
1798 .with_highlights_query(
1799 r#"
1800 (field_identifier) @field
1801 (struct_expression) @struct
1802 "#,
1803 )
1804 .unwrap()
1805 .with_injection_query(
1806 r#"
1807 (macro_invocation
1808 (token_tree) @content
1809 (#set! "language" "rust"))
1810 "#,
1811 )
1812 .unwrap()
1813 }
1814
1815 fn range_for_text(buffer: &Buffer, text: &str) -> Range<usize> {
1816 let start = buffer.as_rope().to_string().find(text).unwrap();
1817 start..start + text.len()
1818 }
1819
1820 fn assert_layers_for_range(
1821 syntax_map: &SyntaxMap,
1822 buffer: &BufferSnapshot,
1823 range: Range<Point>,
1824 expected_layers: &[&str],
1825 ) {
1826 let layers = syntax_map.layers_for_range(range, &buffer);
1827 assert_eq!(
1828 layers.len(),
1829 expected_layers.len(),
1830 "wrong number of layers"
1831 );
1832 for (i, ((_, _, node), expected_s_exp)) in
1833 layers.iter().zip(expected_layers.iter()).enumerate()
1834 {
1835 let actual_s_exp = node.to_sexp();
1836 assert!(
1837 string_contains_sequence(
1838 &actual_s_exp,
1839 &expected_s_exp.split("...").collect::<Vec<_>>()
1840 ),
1841 "layer {i}:\n\nexpected: {expected_s_exp}\nactual: {actual_s_exp}",
1842 );
1843 }
1844 }
1845
1846 fn assert_capture_ranges(
1847 syntax_map: &SyntaxMap,
1848 buffer: &BufferSnapshot,
1849 highlight_query_capture_names: &[&str],
1850 marked_string: &str,
1851 ) {
1852 let mut actual_ranges = Vec::<Range<usize>>::new();
1853 let captures = syntax_map.captures(0..buffer.len(), buffer, |grammar| {
1854 grammar.highlights_query.as_ref()
1855 });
1856 let queries = captures
1857 .grammars()
1858 .iter()
1859 .map(|grammar| grammar.highlights_query.as_ref().unwrap())
1860 .collect::<Vec<_>>();
1861 for capture in captures {
1862 let name = &queries[capture.grammar_index].capture_names()[capture.index as usize];
1863 if highlight_query_capture_names.contains(&name.as_str()) {
1864 actual_ranges.push(capture.node.byte_range());
1865 }
1866 }
1867
1868 let (text, expected_ranges) = marked_text_ranges(&marked_string.unindent(), false);
1869 assert_eq!(text, buffer.text());
1870 assert_eq!(actual_ranges, expected_ranges);
1871 }
1872
1873 fn edit_buffer(buffer: &mut Buffer, marked_string: &str) {
1874 let old_text = buffer.text();
1875 let (new_text, mut ranges) = marked_text_ranges(marked_string, false);
1876 if ranges.is_empty() {
1877 ranges.push(0..new_text.len());
1878 }
1879
1880 assert_eq!(
1881 old_text[..ranges[0].start],
1882 new_text[..ranges[0].start],
1883 "invalid edit"
1884 );
1885
1886 let mut delta = 0;
1887 let mut edits = Vec::new();
1888 let mut ranges = ranges.into_iter().peekable();
1889
1890 while let Some(inserted_range) = ranges.next() {
1891 let new_start = inserted_range.start;
1892 let old_start = (new_start as isize - delta) as usize;
1893
1894 let following_text = if let Some(next_range) = ranges.peek() {
1895 &new_text[inserted_range.end..next_range.start]
1896 } else {
1897 &new_text[inserted_range.end..]
1898 };
1899
1900 let inserted_len = inserted_range.len();
1901 let deleted_len = old_text[old_start..]
1902 .find(following_text)
1903 .expect("invalid edit");
1904
1905 let old_range = old_start..old_start + deleted_len;
1906 edits.push((old_range, new_text[inserted_range].to_string()));
1907 delta += inserted_len as isize - deleted_len as isize;
1908 }
1909
1910 assert_eq!(
1911 old_text.len() as isize + delta,
1912 new_text.len() as isize,
1913 "invalid edit"
1914 );
1915
1916 buffer.edit(edits);
1917 }
1918
1919 pub fn string_contains_sequence(text: &str, parts: &[&str]) -> bool {
1920 let mut last_part_end = 0;
1921 for part in parts {
1922 if let Some(start_ix) = text[last_part_end..].find(part) {
1923 last_part_end = start_ix + part.len();
1924 } else {
1925 return false;
1926 }
1927 }
1928 true
1929 }
1930}