1use crate::{Grammar, InjectionConfig, Language, LanguageRegistry};
2use lazy_static::lazy_static;
3use parking_lot::Mutex;
4use std::{
5 borrow::Cow,
6 cell::RefCell,
7 cmp::{Ordering, Reverse},
8 collections::BinaryHeap,
9 ops::{Deref, DerefMut, Range},
10 sync::Arc,
11};
12use sum_tree::{Bias, SeekTarget, SumTree};
13use text::{rope, Anchor, BufferSnapshot, OffsetRangeExt, Point, Rope, ToOffset, ToPoint};
14use tree_sitter::{
15 Node, Parser, Query, QueryCapture, QueryCaptures, QueryCursor, QueryMatches, Tree,
16};
17
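// A single tree-sitter `Parser` is reused per thread (see `parse_text`), and
// `QueryCursor`s are pooled globally via `QueryCursorHandle` so that query
// allocations are recycled instead of recreated for every highlight pass.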
18thread_local! {
19 static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
20}
21
22lazy_static! {
23 static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
24}
25
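/// Tracks the tree-sitter syntax trees ("layers") for a buffer, including the
/// trees of injected languages, along with the buffer version the trees were
/// parsed at (`parsed_version`) and the version their positions have been
/// interpolated to (`interpolated_version`).
///
/// A minimal setup sketch, assuming the test helpers defined at the bottom of
/// this file (`LanguageRegistry::test`, `rust_lang`); `reparse` and `layers`
/// are `#[cfg(test)]`-only conveniences, while production callers use
/// `interpolate` and `did_parse`:
///
/// ```ignore
/// let registry = Arc::new(LanguageRegistry::test());
/// let language = Arc::new(rust_lang());
/// registry.add(language.clone());
/// let buffer = Buffer::new(0, 0, "fn main() {}".to_string());
///
/// let mut syntax_map = SyntaxMap::new();
/// syntax_map.set_language_registry(registry);
/// syntax_map.reparse(language, &buffer);
/// let layers = syntax_map.layers(&buffer);
/// ```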
26#[derive(Default)]
27pub struct SyntaxMap {
28 parsed_version: clock::Global,
29 interpolated_version: clock::Global,
30 snapshot: SyntaxSnapshot,
31 language_registry: Option<Arc<LanguageRegistry>>,
32}
33
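/// An immutable snapshot of a buffer's syntax layers. Layers are stored in a
/// `SumTree`, ordered by depth (0 for the buffer's primary language, +1 for
/// each level of injection) and then by position in the buffer.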
34#[derive(Clone, Default)]
35pub struct SyntaxSnapshot {
36 layers: SumTree<SyntaxLayer>,
37}
38
39#[derive(Default)]
40pub struct SyntaxMapCaptures<'a> {
41 layers: Vec<SyntaxMapCapturesLayer<'a>>,
42 active_layer_count: usize,
43 grammars: Vec<&'a Grammar>,
44}
45
46#[derive(Default)]
47pub struct SyntaxMapMatches<'a> {
48 layers: Vec<SyntaxMapMatchesLayer<'a>>,
49 active_layer_count: usize,
50 grammars: Vec<&'a Grammar>,
51}
52
53#[derive(Debug)]
54pub struct SyntaxMapCapture<'a> {
55 pub depth: usize,
56 pub node: Node<'a>,
57 pub index: u32,
58 pub grammar_index: usize,
59}
60
61#[derive(Debug)]
62pub struct SyntaxMapMatch<'a> {
63 pub depth: usize,
64 pub pattern_index: usize,
65 pub captures: &'a [QueryCapture<'a>],
66 pub grammar_index: usize,
67}
68
69struct SyntaxMapCapturesLayer<'a> {
70 depth: usize,
71 captures: QueryCaptures<'a, 'a, TextProvider<'a>>,
72 next_capture: Option<QueryCapture<'a>>,
73 grammar_index: usize,
74 _query_cursor: QueryCursorHandle,
75}
76
77struct SyntaxMapMatchesLayer<'a> {
78 depth: usize,
79 next_pattern_index: usize,
80 next_captures: Vec<QueryCapture<'a>>,
81 has_next: bool,
82 matches: QueryMatches<'a, 'a, TextProvider<'a>>,
83 grammar_index: usize,
84 _query_cursor: QueryCursorHandle,
85}
86
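// One parsed tree for a single language, covering `range` within the buffer.
// Depth 0 is the buffer's primary language; each injected language's layer is
// one level deeper than the layer containing the injection.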
87#[derive(Clone)]
88struct SyntaxLayer {
89 depth: usize,
90 range: Range<Anchor>,
91 tree: tree_sitter::Tree,
92 language: Arc<Language>,
93}
94
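// Summary for the layer `SumTree`: the span of depths it covers, the union of
// layer ranges at the maximum depth, and the range of the right-most layer;
// the seek targets below compare against these fields.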
95#[derive(Debug, Clone)]
96struct SyntaxLayerSummary {
97 min_depth: usize,
98 max_depth: usize,
99 range: Range<Anchor>,
100 last_layer_range: Range<Anchor>,
101}
102
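// Seek targets for the layer cursor: by depth and range, by depth and an upper
// bound on position, or a combination that stops at whichever of the two
// targets is reached first.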
103#[derive(Clone, Debug)]
104struct DepthAndRange(usize, Range<Anchor>);
105
106#[derive(Clone, Debug)]
107struct DepthAndMaxPosition(usize, Anchor);
108
109#[derive(Clone, Debug)]
110struct DepthAndRangeOrMaxPosition(DepthAndRange, DepthAndMaxPosition);
111
112struct ReparseStep {
113 depth: usize,
114 language: Arc<Language>,
115 ranges: Vec<tree_sitter::Range>,
116 range: Range<Anchor>,
117}
118
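// A region of the buffer whose syntax changed during reparsing, at a given
// depth. `ChangeRegionSet` keeps these regions sorted so that deeper layers
// overlapping a changed region are re-created rather than copied over from the
// previous snapshot.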
119#[derive(Debug, PartialEq, Eq)]
120struct ChangedRegion {
121 depth: usize,
122 range: Range<Anchor>,
123}
124
125#[derive(Default)]
126struct ChangeRegionSet(Vec<ChangedRegion>);
127
128struct TextProvider<'a>(&'a Rope);
129
130struct ByteChunks<'a>(rope::Chunks<'a>);
131
132struct QueryCursorHandle(Option<QueryCursor>);
133
134impl SyntaxMap {
135 pub fn new() -> Self {
136 Self::default()
137 }
138
139 pub fn set_language_registry(&mut self, registry: Arc<LanguageRegistry>) {
140 self.language_registry = Some(registry);
141 }
142
143 pub fn snapshot(&self) -> SyntaxSnapshot {
144 self.snapshot.clone()
145 }
146
147 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
148 self.language_registry.clone()
149 }
150
151 pub fn parsed_version(&self) -> clock::Global {
152 self.parsed_version.clone()
153 }
154
155 pub fn interpolate(&mut self, text: &BufferSnapshot) {
156 self.snapshot.interpolate(&self.interpolated_version, text);
157 self.interpolated_version = text.version.clone();
158 }
159
160 #[cfg(test)]
161 pub fn reparse(&mut self, language: Arc<Language>, text: &BufferSnapshot) {
162 self.snapshot.reparse(
163 &self.parsed_version,
164 text,
165 self.language_registry.clone(),
166 language,
167 );
168 self.parsed_version = text.version.clone();
169 self.interpolated_version = text.version.clone();
170 }
171
172 pub fn did_parse(&mut self, snapshot: SyntaxSnapshot, version: clock::Global) {
173 self.interpolated_version = version.clone();
174 self.parsed_version = version;
175 self.snapshot = snapshot;
176 }
177
178 pub fn clear(&mut self) {
179 self.snapshot = SyntaxSnapshot::default();
180 }
181}
182
183impl SyntaxSnapshot {
184 pub fn is_empty(&self) -> bool {
185 self.layers.is_empty()
186 }
187
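    /// Bring the snapshot up to date with edits made since `from_version`
    /// without reparsing: each retained layer's existing tree is adjusted with
    /// `tree_sitter::Tree::edit` (in coordinates relative to the layer's
    /// start), so node positions stay consistent with the new text until the
    /// next reparse.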
188 pub fn interpolate(&mut self, from_version: &clock::Global, text: &BufferSnapshot) {
189 let edits = text
190 .edits_since::<(usize, Point)>(&from_version)
191 .collect::<Vec<_>>();
192 if edits.is_empty() {
193 return;
194 }
195
196 let mut layers = SumTree::new();
197 let mut first_edit_ix_for_depth = 0;
198 let mut cursor = self.layers.cursor::<SyntaxLayerSummary>();
199 cursor.next(text);
200
201 'outer: loop {
202 let depth = cursor.end(text).max_depth;
203
204 // Preserve any layers at this depth that precede the first edit.
205 if let Some(first_edit) = edits.get(first_edit_ix_for_depth) {
206 let target = DepthAndMaxPosition(depth, text.anchor_before(first_edit.new.start.0));
207 if target.cmp(&cursor.start(), text).is_gt() {
208 let slice = cursor.slice(&target, Bias::Left, text);
209 layers.push_tree(slice, text);
210 }
211 }
212 // If this layer follows all of the edits, then preserve it and any
213 // subsequent layers at this same depth.
214 else {
215 let slice = cursor.slice(
216 &DepthAndRange(depth + 1, Anchor::MIN..Anchor::MAX),
217 Bias::Left,
218 text,
219 );
220 layers.push_tree(slice, text);
221 first_edit_ix_for_depth = 0;
222 continue;
223 };
224
225 let layer = if let Some(layer) = cursor.item() {
226 layer
227 } else {
228 break;
229 };
230
231 let mut endpoints = text
232 .summaries_for_anchors::<(usize, Point), _>([&layer.range.start, &layer.range.end]);
233 let layer_range = endpoints.next().unwrap()..endpoints.next().unwrap();
234 let start_byte = layer_range.start.0;
235 let start_point = layer_range.start.1;
236 let end_byte = layer_range.end.0;
237
238 // Ignore edits that end before the start of this layer, and don't consider them
239 // for any subsequent layers at this same depth.
240 loop {
241 if let Some(edit) = edits.get(first_edit_ix_for_depth) {
242 if edit.new.end.0 < start_byte {
243 first_edit_ix_for_depth += 1;
244 } else {
245 break;
246 }
247 } else {
248 continue 'outer;
249 }
250 }
251
252 let mut old_start_byte = start_byte;
253 if first_edit_ix_for_depth > 0 {
254 let edit = &edits[first_edit_ix_for_depth - 1];
255 old_start_byte = edit.old.end.0 + (start_byte - edit.new.end.0);
256 }
257
258 let mut layer = layer.clone();
259 for edit in &edits[first_edit_ix_for_depth..] {
260 // Ignore any edits that follow this layer.
261 if edit.new.start.0 > end_byte {
262 break;
263 }
264
265 // Apply any edits that intersect this layer to the layer's syntax tree.
266 let tree_edit = if edit.old.start.0 >= old_start_byte {
267 tree_sitter::InputEdit {
268 start_byte: edit.new.start.0 - start_byte,
269 old_end_byte: edit.new.start.0 - start_byte
270 + (edit.old.end.0 - edit.old.start.0),
271 new_end_byte: edit.new.end.0 - start_byte,
272 start_position: (edit.new.start.1 - start_point).to_ts_point(),
273 old_end_position: (edit.new.start.1 - start_point
274 + (edit.old.end.1 - edit.old.start.1))
275 .to_ts_point(),
276 new_end_position: (edit.new.end.1 - start_point).to_ts_point(),
277 }
278 } else {
279 let node = layer.tree.root_node();
280 tree_sitter::InputEdit {
281 start_byte: 0,
282 old_end_byte: node.end_byte(),
283 new_end_byte: 0,
284 start_position: Default::default(),
285 old_end_position: node.end_position(),
286 new_end_position: Default::default(),
287 }
288 };
289
290 layer.tree.edit(&tree_edit);
291 }
292
293 debug_assert!(
294 layer.tree.root_node().end_byte() <= text.len(),
295 "tree's size {}, is larger than text size {}",
296 layer.tree.root_node().end_byte(),
297 text.len(),
298 );
299
300 layers.push(layer, text);
301 cursor.next(text);
302 }
303
304 layers.push_tree(cursor.suffix(&text), &text);
305 drop(cursor);
306 self.layers = layers;
307 }
308
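    /// Reparse the buffer after edits. Work is driven by a max-heap of
    /// `ReparseStep`s, ordered so that the shallowest, left-most step is
    /// handled first. Layers unaffected by the edits (and by any changed
    /// regions recorded at their depth) are copied over from the previous
    /// snapshot; the rest are reparsed, and each reparsed layer's injection
    /// query can push further steps for deeper, injected languages onto the
    /// queue.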
309 pub fn reparse(
310 &mut self,
311 from_version: &clock::Global,
312 text: &BufferSnapshot,
313 registry: Option<Arc<LanguageRegistry>>,
314 language: Arc<Language>,
315 ) {
316 let edits = text.edits_since::<usize>(from_version).collect::<Vec<_>>();
317 let max_depth = self.layers.summary().max_depth;
318 let mut cursor = self.layers.cursor::<SyntaxLayerSummary>();
319 cursor.next(&text);
320 let mut layers = SumTree::new();
321
322 let mut changed_regions = ChangeRegionSet::default();
323 let mut queue = BinaryHeap::new();
324 queue.push(ReparseStep {
325 depth: 0,
326 language: language.clone(),
327 ranges: Vec::new(),
328 range: Anchor::MIN..Anchor::MAX,
329 });
330
331 loop {
332 let step = queue.pop();
333 let (depth, range) = if let Some(step) = &step {
334 (step.depth, step.range.clone())
335 } else {
336 (max_depth + 1, Anchor::MAX..Anchor::MAX)
337 };
338
339 let target = DepthAndRange(depth, range.clone());
340 let mut done = cursor.item().is_none();
341 while !done && target.cmp(&cursor.end(text), &text).is_gt() {
342 done = true;
343
344 let bounded_target =
345 DepthAndRangeOrMaxPosition(target.clone(), changed_regions.start_position());
346 if bounded_target.cmp(&cursor.start(), &text).is_gt() {
347 let slice = cursor.slice(&bounded_target, Bias::Left, text);
348 if !slice.is_empty() {
349 layers.push_tree(slice, &text);
350 if changed_regions.prune(cursor.end(text), text) {
351 done = false;
352 }
353 }
354 }
355
356 while target.cmp(&cursor.end(text), text).is_gt() {
357 let layer = if let Some(layer) = cursor.item() {
358 layer
359 } else {
360 break;
361 };
362
363 if changed_regions.intersects(&layer, text) {
364 changed_regions.insert(
365 ChangedRegion {
366 depth: layer.depth + 1,
367 range: layer.range.clone(),
368 },
369 text,
370 );
371 } else {
372 layers.push(layer.clone(), text);
373 }
374
375 cursor.next(text);
376 if changed_regions.prune(cursor.end(text), text) {
377 done = false;
378 }
379 }
380 }
381
382 let (ranges, language) = if let Some(step) = step {
383 (step.ranges, step.language)
384 } else {
385 break;
386 };
387
388 let start_point;
389 let start_byte;
390 let end_byte;
391 if let Some((first, last)) = ranges.first().zip(ranges.last()) {
392 start_point = first.start_point;
393 start_byte = first.start_byte;
394 end_byte = last.end_byte;
395 } else {
396 start_point = Point::zero().to_ts_point();
397 start_byte = 0;
398 end_byte = text.len();
399 };
400
401 let mut old_layer = cursor.item();
402 if let Some(layer) = old_layer {
403 if layer.range.to_offset(text) == (start_byte..end_byte) {
404 cursor.next(&text);
405 } else {
406 old_layer = None;
407 }
408 }
409
410 let grammar = if let Some(grammar) = language.grammar.as_deref() {
411 grammar
412 } else {
413 continue;
414 };
415
416 let tree;
417 let changed_ranges;
418 if let Some(old_layer) = old_layer {
419 tree = parse_text(
420 grammar,
421 text.as_rope(),
422 Some(old_layer.tree.clone()),
423 ranges,
424 );
425 changed_ranges = join_ranges(
426 edits
427 .iter()
428 .map(|e| e.new.clone())
429 .filter(|range| range.start < end_byte && range.end > start_byte),
430 old_layer
431 .tree
432 .changed_ranges(&tree)
433 .map(|r| start_byte + r.start_byte..start_byte + r.end_byte),
434 );
435 } else {
436 tree = parse_text(grammar, text.as_rope(), None, ranges);
437 changed_ranges = vec![start_byte..end_byte];
438 }
439
440 layers.push(
441 SyntaxLayer {
442 depth,
443 range,
444 tree: tree.clone(),
445 language: language.clone(),
446 },
447 &text,
448 );
449
450 if let (Some((config, registry)), false) = (
451 grammar.injection_config.as_ref().zip(registry.as_ref()),
452 changed_ranges.is_empty(),
453 ) {
454 let depth = depth + 1;
455 for range in &changed_ranges {
456 changed_regions.insert(
457 ChangedRegion {
458 depth,
459 range: text.anchor_before(range.start)..text.anchor_after(range.end),
460 },
461 text,
462 );
463 }
464 get_injections(
465 config,
466 text,
467 tree.root_node_with_offset(start_byte, start_point),
468 registry,
469 depth,
470 &changed_ranges,
471 &mut queue,
472 );
473 }
474 }
475
476 drop(cursor);
477 self.layers = layers;
478 }
479
480 pub fn single_tree_captures<'a>(
481 range: Range<usize>,
482 text: &'a Rope,
483 tree: &'a Tree,
484 grammar: &'a Grammar,
485 query: fn(&Grammar) -> Option<&Query>,
486 ) -> SyntaxMapCaptures<'a> {
487 SyntaxMapCaptures::new(
488 range.clone(),
489 text,
490 [(grammar, 0, tree.root_node())].into_iter(),
491 query,
492 )
493 }
494
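    /// Returns an iterator over all captures of the given query that intersect
    /// `range`, merged across every syntax layer overlapping that range and
    /// ordered by position in the buffer.
    ///
    /// A small sketch, assuming `buffer: &BufferSnapshot` and a grammar with a
    /// populated `highlights_query`, mirroring the usage in this file's tests:
    ///
    /// ```ignore
    /// let captures = syntax_map.captures(0..buffer.len(), buffer, |grammar| {
    ///     grammar.highlights_query.as_ref()
    /// });
    /// let grammars = captures.grammars().to_vec();
    /// for capture in captures {
    ///     let grammar = grammars[capture.grammar_index];
    ///     let byte_range = capture.node.byte_range();
    ///     // e.g. resolve the capture name via the grammar's query and style it.
    /// }
    /// ```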
495 pub fn captures<'a>(
496 &'a self,
497 range: Range<usize>,
498 buffer: &'a BufferSnapshot,
499 query: fn(&Grammar) -> Option<&Query>,
500 ) -> SyntaxMapCaptures {
501 SyntaxMapCaptures::new(
502 range.clone(),
503 buffer.as_rope(),
504 self.layers_for_range(range, buffer).into_iter(),
505 query,
506 )
507 }
508
509 pub fn matches<'a>(
510 &'a self,
511 range: Range<usize>,
512 buffer: &'a BufferSnapshot,
513 query: fn(&Grammar) -> Option<&Query>,
514 ) -> SyntaxMapMatches {
515 SyntaxMapMatches::new(
516 range.clone(),
517 buffer.as_rope(),
518 self.layers_for_range(range, buffer).into_iter(),
519 query,
520 )
521 }
522
523 #[cfg(test)]
524 pub fn layers(&self, buffer: &BufferSnapshot) -> Vec<(&Grammar, usize, Node)> {
525 self.layers_for_range(0..buffer.len(), buffer)
526 }
527
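    /// Collect `(grammar, depth, root node)` for every layer whose range
    /// intersects the given buffer range, with each root node offset so that
    /// its positions are expressed in buffer coordinates.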
528 pub fn layers_for_range<'a, T: ToOffset>(
529 &self,
530 range: Range<T>,
531 buffer: &BufferSnapshot,
532 ) -> Vec<(&Grammar, usize, Node)> {
533 let start = buffer.anchor_before(range.start.to_offset(buffer));
534 let end = buffer.anchor_after(range.end.to_offset(buffer));
535
536 let mut cursor = self.layers.filter::<_, ()>(|summary| {
537 if summary.max_depth > summary.min_depth {
538 true
539 } else {
540 let is_before_start = summary.range.end.cmp(&start, buffer).is_lt();
541 let is_after_end = summary.range.start.cmp(&end, buffer).is_gt();
542 !is_before_start && !is_after_end
543 }
544 });
545
546 let mut result = Vec::new();
547 cursor.next(buffer);
548 while let Some(layer) = cursor.item() {
549 if let Some(grammar) = &layer.language.grammar {
550 result.push((
551 grammar.as_ref(),
552 layer.depth,
553 layer.tree.root_node_with_offset(
554 layer.range.start.to_offset(buffer),
555 layer.range.start.to_point(buffer).to_ts_point(),
556 ),
557 ));
558 }
559 cursor.next(buffer)
560 }
561
562 result
563 }
564}
565
566impl<'a> SyntaxMapCaptures<'a> {
567 fn new(
568 range: Range<usize>,
569 text: &'a Rope,
570 layers: impl Iterator<Item = (&'a Grammar, usize, Node<'a>)>,
571 query: fn(&Grammar) -> Option<&Query>,
572 ) -> Self {
573 let mut result = Self {
574 layers: Vec::new(),
575 grammars: Vec::new(),
576 active_layer_count: 0,
577 };
578 for (grammar, depth, node) in layers {
579 let query = if let Some(query) = query(grammar) {
580 query
581 } else {
582 continue;
583 };
584
585 let mut query_cursor = QueryCursorHandle::new();
586
587 // TODO - add a Tree-sitter API to remove the need for this.
588 let cursor = unsafe {
589 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
590 };
591
592 cursor.set_byte_range(range.clone());
593 let captures = cursor.captures(query, node, TextProvider(text));
594 let grammar_index = result
595 .grammars
596 .iter()
597 .position(|g| g.id == grammar.id())
598 .unwrap_or_else(|| {
599 result.grammars.push(grammar);
600 result.grammars.len() - 1
601 });
602 let mut layer = SyntaxMapCapturesLayer {
603 depth,
604 grammar_index,
605 next_capture: None,
606 captures,
607 _query_cursor: query_cursor,
608 };
609
610 layer.advance();
611 if layer.next_capture.is_some() {
612 let key = layer.sort_key();
613 let ix = match result.layers[..result.active_layer_count]
614 .binary_search_by_key(&key, |layer| layer.sort_key())
615 {
616 Ok(ix) | Err(ix) => ix,
617 };
618 result.layers.insert(ix, layer);
619 result.active_layer_count += 1;
620 } else {
621 result.layers.push(layer);
622 }
623 }
624
625 result
626 }
627
628 pub fn grammars(&self) -> &[&'a Grammar] {
629 &self.grammars
630 }
631
632 pub fn peek(&self) -> Option<SyntaxMapCapture<'a>> {
633 let layer = self.layers[..self.active_layer_count].first()?;
634 let capture = layer.next_capture?;
635 Some(SyntaxMapCapture {
636 depth: layer.depth,
637 grammar_index: layer.grammar_index,
638 index: capture.index,
639 node: capture.node,
640 })
641 }
642
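    /// Advance the front-most layer. `layers[..active_layer_count]` is kept
    /// sorted by `sort_key`; after advancing, the front layer is rotated to its
    /// new sorted position, or moved out of the active window once it has no
    /// more captures.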
643 pub fn advance(&mut self) -> bool {
644 let layer = if let Some(layer) = self.layers[..self.active_layer_count].first_mut() {
645 layer
646 } else {
647 return false;
648 };
649
650 layer.advance();
651 if layer.next_capture.is_some() {
652 let key = layer.sort_key();
653 let i = 1 + self.layers[1..self.active_layer_count]
654 .iter()
655 .position(|later_layer| key < later_layer.sort_key())
656 .unwrap_or(self.active_layer_count - 1);
657 self.layers[0..i].rotate_left(1);
658 } else {
659 self.layers[0..self.active_layer_count].rotate_left(1);
660 self.active_layer_count -= 1;
661 }
662
663 true
664 }
665
666 pub fn set_byte_range(&mut self, range: Range<usize>) {
667 for layer in &mut self.layers {
668 layer.captures.set_byte_range(range.clone());
669 if let Some(capture) = &layer.next_capture {
670 if capture.node.end_byte() > range.start {
671 continue;
672 }
673 }
674 layer.advance();
675 }
676 self.layers.sort_unstable_by_key(|layer| layer.sort_key());
677 self.active_layer_count = self
678 .layers
679 .iter()
680 .position(|layer| layer.next_capture.is_none())
681 .unwrap_or(self.layers.len());
682 }
683}
684
685impl<'a> SyntaxMapMatches<'a> {
686 fn new(
687 range: Range<usize>,
688 text: &'a Rope,
689 layers: impl Iterator<Item = (&'a Grammar, usize, Node<'a>)>,
690 query: fn(&Grammar) -> Option<&Query>,
691 ) -> Self {
692 let mut result = Self::default();
693 for (grammar, depth, node) in layers {
694 let query = if let Some(query) = query(grammar) {
695 query
696 } else {
697 continue;
698 };
699
700 let mut query_cursor = QueryCursorHandle::new();
701
702 // TODO - add a Tree-sitter API to remove the need for this.
703 let cursor = unsafe {
704 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
705 };
706
707 cursor.set_byte_range(range.clone());
708 let matches = cursor.matches(query, node, TextProvider(text));
709 let grammar_index = result
710 .grammars
711 .iter()
712 .position(|g| g.id == grammar.id())
713 .unwrap_or_else(|| {
714 result.grammars.push(grammar);
715 result.grammars.len() - 1
716 });
717 let mut layer = SyntaxMapMatchesLayer {
718 depth,
719 grammar_index,
720 matches,
721 next_pattern_index: 0,
722 next_captures: Vec::new(),
723 has_next: false,
724 _query_cursor: query_cursor,
725 };
726
727 layer.advance();
728 if layer.has_next {
729 let key = layer.sort_key();
730 let ix = match result.layers[..result.active_layer_count]
731 .binary_search_by_key(&key, |layer| layer.sort_key())
732 {
733 Ok(ix) | Err(ix) => ix,
734 };
735 result.layers.insert(ix, layer);
736 result.active_layer_count += 1;
737 } else {
738 result.layers.push(layer);
739 }
740 }
741 result
742 }
743
744 pub fn grammars(&self) -> &[&'a Grammar] {
745 &self.grammars
746 }
747
748 pub fn peek(&self) -> Option<SyntaxMapMatch> {
749 let layer = self.layers.first()?;
750 if !layer.has_next {
751 return None;
752 }
753 Some(SyntaxMapMatch {
754 depth: layer.depth,
755 grammar_index: layer.grammar_index,
756 pattern_index: layer.next_pattern_index,
757 captures: &layer.next_captures,
758 })
759 }
760
761 pub fn advance(&mut self) -> bool {
762 let layer = if let Some(layer) = self.layers.first_mut() {
763 layer
764 } else {
765 return false;
766 };
767
768 layer.advance();
769 if layer.has_next {
770 let key = layer.sort_key();
771 let i = 1 + self.layers[1..self.active_layer_count]
772 .iter()
773 .position(|later_layer| key < later_layer.sort_key())
774 .unwrap_or(self.active_layer_count - 1);
775 self.layers[0..i].rotate_left(1);
776 } else {
777 self.layers[0..self.active_layer_count].rotate_left(1);
778 self.active_layer_count -= 1;
779 }
780
781 true
782 }
783}
784
785impl<'a> SyntaxMapCapturesLayer<'a> {
786 fn advance(&mut self) {
787 self.next_capture = self.captures.next().map(|(mat, ix)| mat.captures[ix]);
788 }
789
790 fn sort_key(&self) -> (usize, Reverse<usize>, usize) {
791 if let Some(capture) = &self.next_capture {
792 let range = capture.node.byte_range();
793 (range.start, Reverse(range.end), self.depth)
794 } else {
795 (usize::MAX, Reverse(0), usize::MAX)
796 }
797 }
798}
799
800impl<'a> SyntaxMapMatchesLayer<'a> {
801 fn advance(&mut self) {
802 if let Some(mat) = self.matches.next() {
803 self.next_captures.clear();
804 self.next_captures.extend_from_slice(&mat.captures);
805 self.next_pattern_index = mat.pattern_index;
806 self.has_next = true;
807 } else {
808 self.has_next = false;
809 }
810 }
811
812 fn sort_key(&self) -> (usize, Reverse<usize>, usize) {
813 if self.has_next {
814 let captures = &self.next_captures;
815 if let Some((first, last)) = captures.first().zip(captures.last()) {
816 return (
817 first.node.start_byte(),
818 Reverse(last.node.end_byte()),
819 self.depth,
820 );
821 }
822 }
823 (usize::MAX, Reverse(0), usize::MAX)
824 }
825}
826
827impl<'a> Iterator for SyntaxMapCaptures<'a> {
828 type Item = SyntaxMapCapture<'a>;
829
830 fn next(&mut self) -> Option<Self::Item> {
831 let result = self.peek();
832 self.advance();
833 result
834 }
835}
836
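/// Merge two iterators of byte ranges, each sorted by start position, into one
/// sorted list, coalescing ranges that touch or overlap.
///
/// An illustrative sketch with hypothetical inputs:
///
/// ```ignore
/// let joined = join_ranges([0..3, 10..12].into_iter(), [2..5].into_iter());
/// assert_eq!(joined, vec![0..5, 10..12]);
/// ```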
837fn join_ranges(
838 a: impl Iterator<Item = Range<usize>>,
839 b: impl Iterator<Item = Range<usize>>,
840) -> Vec<Range<usize>> {
841 let mut result = Vec::<Range<usize>>::new();
842 let mut a = a.peekable();
843 let mut b = b.peekable();
844 loop {
845 let range = match (a.peek(), b.peek()) {
846 (Some(range_a), Some(range_b)) => {
847 if range_a.start < range_b.start {
848 a.next().unwrap()
849 } else {
850 b.next().unwrap()
851 }
852 }
853 (None, Some(_)) => b.next().unwrap(),
854 (Some(_), None) => a.next().unwrap(),
855 (None, None) => break,
856 };
857
858 if let Some(last) = result.last_mut() {
859 if range.start <= last.end {
860 last.end = last.end.max(range.end);
861 continue;
862 }
863 }
864 result.push(range);
865 }
866 result
867}
868
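/// Parse `text` with the given grammar, restricted to `ranges` (or the whole
/// text when `ranges` is empty). The ranges are rebased relative to the first
/// range's start, matching the coordinates used by the layer's tree, and the
/// thread-local `PARSER` is reconfigured for the grammar on each call. Passing
/// `old_tree` enables tree-sitter's incremental parsing.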
869fn parse_text(
870 grammar: &Grammar,
871 text: &Rope,
872 old_tree: Option<Tree>,
873 mut ranges: Vec<tree_sitter::Range>,
874) -> Tree {
875 let (start_byte, start_point) = ranges
876 .first()
877 .map(|range| (range.start_byte, Point::from_ts_point(range.start_point)))
878 .unwrap_or_default();
879
880 for range in &mut ranges {
881 range.start_byte -= start_byte;
882 range.end_byte -= start_byte;
883 range.start_point = (Point::from_ts_point(range.start_point) - start_point).to_ts_point();
884 range.end_point = (Point::from_ts_point(range.end_point) - start_point).to_ts_point();
885 }
886
887 PARSER.with(|parser| {
888 let mut parser = parser.borrow_mut();
889 let mut chunks = text.chunks_in_range(start_byte..text.len());
890 parser
891 .set_included_ranges(&ranges)
892 .expect("overlapping ranges");
893 parser
894 .set_language(grammar.ts_language)
895 .expect("incompatible grammar");
896 parser
897 .parse_with(
898 &mut move |offset, _| {
899 chunks.seek(start_byte + offset);
900 chunks.next().unwrap_or("").as_bytes()
901 },
902 old_tree.as_ref(),
903 )
904 .expect("invalid language")
905 })
906}
907
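/// Run the grammar's injection query over each of `query_ranges`, resolve the
/// language of every injection (either from a fixed per-pattern language name
/// or from a captured node's text), and push a `ReparseStep` for each resolved
/// injection onto the reparse queue. Returns whether any steps were queued.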
908fn get_injections(
909 config: &InjectionConfig,
910 text: &BufferSnapshot,
911 node: Node,
912 language_registry: &LanguageRegistry,
913 depth: usize,
914 query_ranges: &[Range<usize>],
915 queue: &mut BinaryHeap<ReparseStep>,
916) -> bool {
917 let mut result = false;
918 let mut query_cursor = QueryCursorHandle::new();
919 let mut prev_match = None;
920 for query_range in query_ranges {
921 query_cursor.set_byte_range(query_range.start.saturating_sub(1)..query_range.end);
922 for mat in query_cursor.matches(&config.query, node, TextProvider(text.as_rope())) {
923 let content_ranges = mat
924 .nodes_for_capture_index(config.content_capture_ix)
925 .map(|node| node.range())
926 .collect::<Vec<_>>();
927 if content_ranges.is_empty() {
928 continue;
929 }
930
931 // Avoid duplicate matches if two changed ranges intersect the same injection.
932 let content_range =
933 content_ranges.first().unwrap().start_byte..content_ranges.last().unwrap().end_byte;
934 if let Some((last_pattern_ix, last_range)) = &prev_match {
935 if mat.pattern_index == *last_pattern_ix && content_range == *last_range {
936 continue;
937 }
938 }
939 prev_match = Some((mat.pattern_index, content_range.clone()));
940
941 let language_name = config.languages_by_pattern_ix[mat.pattern_index]
942 .as_ref()
943 .map(|s| Cow::Borrowed(s.as_ref()))
944 .or_else(|| {
945 let ix = config.language_capture_ix?;
946 let node = mat.nodes_for_capture_index(ix).next()?;
947 Some(Cow::Owned(text.text_for_range(node.byte_range()).collect()))
948 });
949
950 if let Some(language_name) = language_name {
951 if let Some(language) = language_registry.get_language(language_name.as_ref()) {
952 result = true;
953 let range = text.anchor_before(content_range.start)
954 ..text.anchor_after(content_range.end);
955 queue.push(ReparseStep {
956 depth,
957 language,
958 ranges: content_ranges,
959 range,
960 })
961 }
962 }
963 }
964 }
965 result
966}
967
968impl std::ops::Deref for SyntaxMap {
969 type Target = SyntaxSnapshot;
970
971 fn deref(&self) -> &Self::Target {
972 &self.snapshot
973 }
974}
975
976impl PartialEq for ReparseStep {
977 fn eq(&self, _: &Self) -> bool {
978 false
979 }
980}
981
982impl Eq for ReparseStep {}
983
984impl PartialOrd for ReparseStep {
985 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
986 Some(self.cmp(other))
987 }
988}
989
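// The comparisons on depth and start are reversed so that `BinaryHeap`, a
// max-heap, pops the shallowest, left-most pending step first.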
990impl Ord for ReparseStep {
991 fn cmp(&self, other: &Self) -> Ordering {
992 let range_a = self.range();
993 let range_b = other.range();
994 Ord::cmp(&other.depth, &self.depth)
995 .then_with(|| Ord::cmp(&range_b.start, &range_a.start))
996 .then_with(|| Ord::cmp(&range_a.end, &range_b.end))
997 }
998}
999
1000impl ReparseStep {
1001 fn range(&self) -> Range<usize> {
1002 let start = self.ranges.first().map_or(0, |r| r.start_byte);
1003 let end = self.ranges.last().map_or(0, |r| r.end_byte);
1004 start..end
1005 }
1006}
1007
1008impl ChangedRegion {
1009 fn cmp(&self, other: &Self, buffer: &BufferSnapshot) -> Ordering {
1010 let range_a = &self.range;
1011 let range_b = &other.range;
1012 Ord::cmp(&self.depth, &other.depth)
1013 .then_with(|| range_a.start.cmp(&range_b.start, buffer))
1014 .then_with(|| range_b.end.cmp(&range_a.end, buffer))
1015 }
1016}
1017
1018impl ChangeRegionSet {
1019 fn start_position(&self) -> DepthAndMaxPosition {
1020 self.0
1021 .first()
1022 .map_or(DepthAndMaxPosition(usize::MAX, Anchor::MAX), |region| {
1023 DepthAndMaxPosition(region.depth, region.range.start)
1024 })
1025 }
1026
1027 fn intersects(&self, layer: &SyntaxLayer, text: &BufferSnapshot) -> bool {
1028 for region in &self.0 {
1029 if region.depth < layer.depth {
1030 continue;
1031 }
1032 if region.depth > layer.depth {
1033 break;
1034 }
1035 if region.range.end.cmp(&layer.range.start, text).is_le() {
1036 continue;
1037 }
1038 if region.range.start.cmp(&layer.range.end, text).is_ge() {
1039 break;
1040 }
1041 return true;
1042 }
1043 false
1044 }
1045
1046 fn insert(&mut self, region: ChangedRegion, text: &BufferSnapshot) {
1047 if let Err(ix) = self.0.binary_search_by(|probe| probe.cmp(&region, text)) {
1048 self.0.insert(ix, region);
1049 }
1050 }
1051
1052 fn prune(&mut self, summary: SyntaxLayerSummary, text: &BufferSnapshot) -> bool {
1053 let prev_len = self.0.len();
1054 self.0.retain(|region| {
1055 region.depth > summary.max_depth
1056 || (region.depth == summary.max_depth
1057 && region
1058 .range
1059 .end
1060 .cmp(&summary.last_layer_range.start, text)
1061 .is_gt())
1062 });
1063 self.0.len() < prev_len
1064 }
1065}
1066
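// The default `range` is inverted (MAX..MIN) so that the first real summary
// combined into it replaces both endpoints in `add_summary`.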
1067impl Default for SyntaxLayerSummary {
1068 fn default() -> Self {
1069 Self {
1070 max_depth: 0,
1071 min_depth: 0,
1072 range: Anchor::MAX..Anchor::MIN,
1073 last_layer_range: Anchor::MIN..Anchor::MAX,
1074 }
1075 }
1076}
1077
1078impl sum_tree::Summary for SyntaxLayerSummary {
1079 type Context = BufferSnapshot;
1080
1081 fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
1082 if other.max_depth > self.max_depth {
1083 self.max_depth = other.max_depth;
1084 self.range = other.range.clone();
1085 } else {
1086 if other.range.start.cmp(&self.range.start, buffer).is_lt() {
1087 self.range.start = other.range.start;
1088 }
1089 if other.range.end.cmp(&self.range.end, buffer).is_gt() {
1090 self.range.end = other.range.end;
1091 }
1092 }
1093 self.last_layer_range = other.last_layer_range.clone();
1094 }
1095}
1096
1097impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for DepthAndRange {
1098 fn cmp(&self, cursor_location: &SyntaxLayerSummary, buffer: &BufferSnapshot) -> Ordering {
1099 Ord::cmp(&self.0, &cursor_location.max_depth)
1100 .then_with(|| {
1101 self.1
1102 .start
1103 .cmp(&cursor_location.last_layer_range.start, buffer)
1104 })
1105 .then_with(|| {
1106 cursor_location
1107 .last_layer_range
1108 .end
1109 .cmp(&self.1.end, buffer)
1110 })
1111 }
1112}
1113
1114impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for DepthAndMaxPosition {
1115 fn cmp(&self, cursor_location: &SyntaxLayerSummary, text: &BufferSnapshot) -> Ordering {
1116 Ord::cmp(&self.0, &cursor_location.max_depth)
1117 .then_with(|| self.1.cmp(&cursor_location.range.end, text))
1118 }
1119}
1120
1121impl<'a> SeekTarget<'a, SyntaxLayerSummary, SyntaxLayerSummary> for DepthAndRangeOrMaxPosition {
1122 fn cmp(&self, cursor_location: &SyntaxLayerSummary, buffer: &BufferSnapshot) -> Ordering {
1123 if self.1.cmp(cursor_location, buffer).is_le() {
1124 Ordering::Less
1125 } else {
1126 self.0.cmp(cursor_location, buffer)
1127 }
1128 }
1129}
1130
1131impl sum_tree::Item for SyntaxLayer {
1132 type Summary = SyntaxLayerSummary;
1133
1134 fn summary(&self) -> Self::Summary {
1135 SyntaxLayerSummary {
1136 min_depth: self.depth,
1137 max_depth: self.depth,
1138 range: self.range.clone(),
1139 last_layer_range: self.range.clone(),
1140 }
1141 }
1142}
1143
1144impl std::fmt::Debug for SyntaxLayer {
1145 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1146 f.debug_struct("SyntaxLayer")
1147 .field("depth", &self.depth)
1148 .field("range", &self.range)
1149 .field("tree", &self.tree)
1150 .finish()
1151 }
1152}
1153
1154impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
1155 type I = ByteChunks<'a>;
1156
1157 fn text(&mut self, node: tree_sitter::Node) -> Self::I {
1158 ByteChunks(self.0.chunks_in_range(node.byte_range()))
1159 }
1160}
1161
1162impl<'a> Iterator for ByteChunks<'a> {
1163 type Item = &'a [u8];
1164
1165 fn next(&mut self) -> Option<Self::Item> {
1166 self.0.next().map(str::as_bytes)
1167 }
1168}
1169
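// Checks a `QueryCursor` out of the global `QUERY_CURSORS` pool (creating one
// if the pool is empty) and returns it to the pool on drop, after resetting
// its byte and point ranges.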
1170impl QueryCursorHandle {
1171 pub(crate) fn new() -> Self {
1172 let mut cursor = QUERY_CURSORS.lock().pop().unwrap_or_else(QueryCursor::new);
1173 cursor.set_match_limit(64);
1174 QueryCursorHandle(Some(cursor))
1175 }
1176}
1177
1178impl Deref for QueryCursorHandle {
1179 type Target = QueryCursor;
1180
1181 fn deref(&self) -> &Self::Target {
1182 self.0.as_ref().unwrap()
1183 }
1184}
1185
1186impl DerefMut for QueryCursorHandle {
1187 fn deref_mut(&mut self) -> &mut Self::Target {
1188 self.0.as_mut().unwrap()
1189 }
1190}
1191
1192impl Drop for QueryCursorHandle {
1193 fn drop(&mut self) {
1194 let mut cursor = self.0.take().unwrap();
1195 cursor.set_byte_range(0..usize::MAX);
1196 cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
1197 QUERY_CURSORS.lock().push(cursor)
1198 }
1199}
1200
1201pub(crate) trait ToTreeSitterPoint {
1202 fn to_ts_point(self) -> tree_sitter::Point;
1203 fn from_ts_point(point: tree_sitter::Point) -> Self;
1204}
1205
1206impl ToTreeSitterPoint for Point {
1207 fn to_ts_point(self) -> tree_sitter::Point {
1208 tree_sitter::Point::new(self.row as usize, self.column as usize)
1209 }
1210
1211 fn from_ts_point(point: tree_sitter::Point) -> Self {
1212 Point::new(point.row as u32, point.column as u32)
1213 }
1214}
1215
1216#[cfg(test)]
1217mod tests {
1218 use super::*;
1219 use crate::LanguageConfig;
1220 use rand::rngs::StdRng;
1221 use std::env;
1222 use text::{Buffer, Point};
1223 use unindent::Unindent as _;
1224 use util::test::marked_text_ranges;
1225
1226 #[gpui::test]
1227 fn test_syntax_map_layers_for_range() {
1228 let registry = Arc::new(LanguageRegistry::test());
1229 let language = Arc::new(rust_lang());
1230 registry.add(language.clone());
1231
1232 let mut buffer = Buffer::new(
1233 0,
1234 0,
1235 r#"
1236 fn a() {
1237 assert_eq!(
1238 b(vec![C {}]),
1239 vec![d.e],
1240 );
1241 println!("{}", f(|_| true));
1242 }
1243 "#
1244 .unindent(),
1245 );
1246
1247 let mut syntax_map = SyntaxMap::new();
1248 syntax_map.set_language_registry(registry.clone());
1249 syntax_map.reparse(language.clone(), &buffer);
1250
1251 assert_layers_for_range(
1252 &syntax_map,
1253 &buffer,
1254 Point::new(2, 0)..Point::new(2, 0),
1255 &[
1256 "...(function_item ... (block (expression_statement (macro_invocation...",
1257 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1258 ],
1259 );
1260 assert_layers_for_range(
1261 &syntax_map,
1262 &buffer,
1263 Point::new(2, 14)..Point::new(2, 16),
1264 &[
1265 "...(function_item ...",
1266 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1267 "...(array_expression (struct_expression ...",
1268 ],
1269 );
1270 assert_layers_for_range(
1271 &syntax_map,
1272 &buffer,
1273 Point::new(3, 14)..Point::new(3, 16),
1274 &[
1275 "...(function_item ...",
1276 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1277 "...(array_expression (field_expression ...",
1278 ],
1279 );
1280 assert_layers_for_range(
1281 &syntax_map,
1282 &buffer,
1283 Point::new(5, 12)..Point::new(5, 16),
1284 &[
1285 "...(function_item ...",
1286 "...(call_expression ... (arguments (closure_expression ...",
1287 ],
1288 );
1289
1290 // Replace a vec! macro invocation with a plain slice, removing a syntactic layer.
1291 let macro_name_range = range_for_text(&buffer, "vec!");
1292 buffer.edit([(macro_name_range, "&")]);
1293 syntax_map.interpolate(&buffer);
1294 syntax_map.reparse(language.clone(), &buffer);
1295
1296 assert_layers_for_range(
1297 &syntax_map,
1298 &buffer,
1299 Point::new(2, 14)..Point::new(2, 16),
1300 &[
1301 "...(function_item ...",
1302 "...(tuple_expression (call_expression ... arguments: (arguments (reference_expression value: (array_expression...",
1303 ],
1304 );
1305
1306 // Put the vec! macro back, adding back the syntactic layer.
1307 buffer.undo();
1308 syntax_map.interpolate(&buffer);
1309 syntax_map.reparse(language.clone(), &buffer);
1310
1311 assert_layers_for_range(
1312 &syntax_map,
1313 &buffer,
1314 Point::new(2, 14)..Point::new(2, 16),
1315 &[
1316 "...(function_item ...",
1317 "...(tuple_expression (call_expression ... arguments: (arguments (macro_invocation...",
1318 "...(array_expression (struct_expression ...",
1319 ],
1320 );
1321 }
1322
1323 #[gpui::test]
1324 fn test_typing_multiple_new_injections() {
1325 let (buffer, syntax_map) = test_edit_sequence(&[
1326 "fn a() { dbg }",
1327 "fn a() { dbg«!» }",
1328 "fn a() { dbg!«()» }",
1329 "fn a() { dbg!(«b») }",
1330 "fn a() { dbg!(b«.») }",
1331 "fn a() { dbg!(b.«c») }",
1332 "fn a() { dbg!(b.c«()») }",
1333 "fn a() { dbg!(b.c(«vec»)) }",
1334 "fn a() { dbg!(b.c(vec«!»)) }",
1335 "fn a() { dbg!(b.c(vec!«[]»)) }",
1336 "fn a() { dbg!(b.c(vec![«d»])) }",
1337 "fn a() { dbg!(b.c(vec![d«.»])) }",
1338 "fn a() { dbg!(b.c(vec![d.«e»])) }",
1339 ]);
1340
1341 assert_capture_ranges(
1342 &syntax_map,
1343 &buffer,
1344 &["field"],
1345 "fn a() { dbg!(b.«c»(vec![d.«e»])) }",
1346 );
1347 }
1348
1349 #[gpui::test]
1350 fn test_pasting_new_injection_line_between_others() {
1351 let (buffer, syntax_map) = test_edit_sequence(&[
1352 "
1353 fn a() {
1354 b!(B {});
1355 c!(C {});
1356 d!(D {});
1357 e!(E {});
1358 f!(F {});
1359 g!(G {});
1360 }
1361 ",
1362 "
1363 fn a() {
1364 b!(B {});
1365 c!(C {});
1366 d!(D {});
1367 « h!(H {});
1368 » e!(E {});
1369 f!(F {});
1370 g!(G {});
1371 }
1372 ",
1373 ]);
1374
1375 assert_capture_ranges(
1376 &syntax_map,
1377 &buffer,
1378 &["struct"],
1379 "
1380 fn a() {
1381 b!(«B {}»);
1382 c!(«C {}»);
1383 d!(«D {}»);
1384 h!(«H {}»);
1385 e!(«E {}»);
1386 f!(«F {}»);
1387 g!(«G {}»);
1388 }
1389 ",
1390 );
1391 }
1392
1393 #[gpui::test]
1394 fn test_joining_injections_with_child_injections() {
1395 let (buffer, syntax_map) = test_edit_sequence(&[
1396 "
1397 fn a() {
1398 b!(
1399 c![one.two.three],
1400 d![four.five.six],
1401 );
1402 e!(
1403 f![seven.eight],
1404 );
1405 }
1406 ",
1407 "
1408 fn a() {
1409 b!(
1410 c![one.two.three],
1411 d![four.five.six],
1412 ˇ f![seven.eight],
1413 );
1414 }
1415 ",
1416 ]);
1417
1418 assert_capture_ranges(
1419 &syntax_map,
1420 &buffer,
1421 &["field"],
1422 "
1423 fn a() {
1424 b!(
1425 c![one.«two».«three»],
1426 d![four.«five».«six»],
1427 f![seven.«eight»],
1428 );
1429 }
1430 ",
1431 );
1432 }
1433
1434 #[gpui::test]
1435 fn test_editing_edges_of_injection() {
1436 test_edit_sequence(&[
1437 "
1438 fn a() {
1439 b!(c!())
1440 }
1441 ",
1442 "
1443 fn a() {
1444 «d»!(c!())
1445 }
1446 ",
1447 "
1448 fn a() {
1449 «e»d!(c!())
1450 }
1451 ",
1452 "
1453 fn a() {
1454 ed!«[»c!()«]»
1455 }
1456 ",
1457 ]);
1458 }
1459
1460 #[gpui::test]
1461 fn test_edits_preceding_and_intersecting_injection() {
1462 test_edit_sequence(&[
1463 //
1464 "const aaaaaaaaaaaa: B = c!(d(e.f));",
1465 "const aˇa: B = c!(d(eˇ));",
1466 ]);
1467 }
1468
1469 #[gpui::test]
1470 fn test_non_local_changes_create_injections() {
1471 test_edit_sequence(&[
1472 "
1473 // a! {
1474 static B: C = d;
1475 // }
1476 ",
1477 "
1478 ˇa! {
1479 static B: C = d;
1480 ˇ}
1481 ",
1482 ]);
1483 }
1484
1485 #[gpui::test]
1486 fn test_creating_many_injections_in_one_edit() {
1487 test_edit_sequence(&[
1488 "
1489 fn a() {
1490 one(Two::three(3));
1491 four(Five::six(6));
1492 seven(Eight::nine(9));
1493 }
1494 ",
1495 "
1496 fn a() {
1497 one«!»(Two::three(3));
1498 four«!»(Five::six(6));
1499 seven«!»(Eight::nine(9));
1500 }
1501 ",
1502 "
1503 fn a() {
1504 one!(Two::three«!»(3));
1505 four!(Five::six«!»(6));
1506 seven!(Eight::nine«!»(9));
1507 }
1508 ",
1509 ]);
1510 }
1511
1512 #[gpui::test]
1513 fn test_editing_across_injection_boundary() {
1514 test_edit_sequence(&[
1515 "
1516 fn one() {
1517 two();
1518 three!(
1519 three.four,
1520 five.six,
1521 );
1522 }
1523 ",
1524 "
1525 fn one() {
1526 two();
1527 th«irty_five![»
1528 three.four,
1529 five.six,
1530 « seven.eight,
1531 ];»
1532 }
1533 ",
1534 ]);
1535 }
1536
1537 #[gpui::test]
1538 fn test_removing_injection_by_replacing_across_boundary() {
1539 test_edit_sequence(&[
1540 "
1541 fn one() {
1542 two!(
1543 three.four,
1544 );
1545 }
1546 ",
1547 "
1548 fn one() {
1549 t«en
1550 .eleven(
1551 twelve,
1552 »
1553 three.four,
1554 );
1555 }
1556 ",
1557 ]);
1558 }
1559
1560 #[gpui::test(iterations = 100)]
1561 fn test_random_syntax_map_edits(mut rng: StdRng) {
1562 let operations = env::var("OPERATIONS")
1563 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
1564 .unwrap_or(10);
1565
1566 let text = r#"
1567 fn test_something() {
1568 let vec = vec![5, 1, 3, 8];
1569 assert_eq!(
1570 vec
1571 .into_iter()
1572 .map(|i| i * 2)
1573 .collect::<Vec<usize>>(),
1574 vec![
1575 5 * 2, 1 * 2, 3 * 2, 8 * 2
1576 ],
1577 );
1578 }
1579 "#
1580 .unindent()
1581 .repeat(2);
1582
1583 let registry = Arc::new(LanguageRegistry::test());
1584 let language = Arc::new(rust_lang());
1585 registry.add(language.clone());
1586 let mut buffer = Buffer::new(0, 0, text);
1587
1588 let mut syntax_map = SyntaxMap::new();
1589 syntax_map.set_language_registry(registry.clone());
1590 syntax_map.reparse(language.clone(), &buffer);
1591
1592 let mut reference_syntax_map = SyntaxMap::new();
1593 reference_syntax_map.set_language_registry(registry.clone());
1594
1595 log::info!("initial text:\n{}", buffer.text());
1596
1597 for i in 0..operations {
1598 let prev_buffer = buffer.snapshot();
1599 let prev_syntax_map = syntax_map.snapshot();
1600
1601 buffer.randomly_edit(&mut rng, 2);
1602 log::info!("text:\n{}", buffer.text());
1603
1604 syntax_map.interpolate(&buffer);
1605 check_interpolation(&prev_syntax_map, &syntax_map, &prev_buffer, &buffer);
1606
1607 syntax_map.reparse(language.clone(), &buffer);
1608
1609 reference_syntax_map.clear();
1610 reference_syntax_map.reparse(language.clone(), &buffer);
1611 assert_eq!(
1612 syntax_map.layers(&buffer).len(),
1613 reference_syntax_map.layers(&buffer).len(),
1614 "wrong number of layers after performing edit {i}"
1615 );
1616 }
1617
1618 for i in 0..operations {
1619 let i = operations - i - 1;
1620 buffer.undo();
1621 log::info!("undoing operation {}", i);
1622 log::info!("text:\n{}", buffer.text());
1623
1624 syntax_map.interpolate(&buffer);
1625 syntax_map.reparse(language.clone(), &buffer);
1626
1627 reference_syntax_map.clear();
1628 reference_syntax_map.reparse(language.clone(), &buffer);
1629 assert_eq!(
1630 syntax_map.layers(&buffer).len(),
1631 reference_syntax_map.layers(&buffer).len(),
1632 "wrong number of layers after undoing edit {i}"
1633 );
1634 }
1635
1636 let layers = syntax_map.layers(&buffer);
1637 let reference_layers = reference_syntax_map.layers(&buffer);
1638 for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter())
1639 {
1640 assert_eq!(edited_layer.2.to_sexp(), reference_layer.2.to_sexp());
1641 assert_eq!(edited_layer.2.range(), reference_layer.2.range());
1642 }
1643 }
1644
1645 fn check_interpolation(
1646 old_syntax_map: &SyntaxSnapshot,
1647 new_syntax_map: &SyntaxSnapshot,
1648 old_buffer: &BufferSnapshot,
1649 new_buffer: &BufferSnapshot,
1650 ) {
1651 let edits = new_buffer
1652 .edits_since::<usize>(&old_buffer.version())
1653 .collect::<Vec<_>>();
1654
1655 for (old_layer, new_layer) in old_syntax_map
1656 .layers
1657 .iter()
1658 .zip(new_syntax_map.layers.iter())
1659 {
1660 assert_eq!(old_layer.range, new_layer.range);
1661 let old_start_byte = old_layer.range.start.to_offset(old_buffer);
1662 let new_start_byte = new_layer.range.start.to_offset(new_buffer);
1663 let old_start_point = old_layer.range.start.to_point(old_buffer).to_ts_point();
1664 let new_start_point = new_layer.range.start.to_point(new_buffer).to_ts_point();
1665 let old_node = old_layer
1666 .tree
1667 .root_node_with_offset(old_start_byte, old_start_point);
1668 let new_node = new_layer
1669 .tree
1670 .root_node_with_offset(new_start_byte, new_start_point);
1671 check_node_edits(
1672 old_layer.depth,
1673 &old_layer.range,
1674 old_node,
1675 new_node,
1676 old_buffer,
1677 new_buffer,
1678 &edits,
1679 );
1680 }
1681
1682 fn check_node_edits(
1683 depth: usize,
1684 range: &Range<Anchor>,
1685 old_node: Node,
1686 new_node: Node,
1687 old_buffer: &BufferSnapshot,
1688 new_buffer: &BufferSnapshot,
1689 edits: &[text::Edit<usize>],
1690 ) {
1691 assert_eq!(old_node.kind(), new_node.kind());
1692
1693 let old_range = old_node.byte_range();
1694 let new_range = new_node.byte_range();
1695
1696 let is_edited = edits
1697 .iter()
1698 .any(|edit| edit.new.start < new_range.end && edit.new.end > new_range.start);
1699 if is_edited {
1700 assert!(
1701 new_node.has_changes(),
1702 concat!(
1703 "failed to mark node as edited.\n",
1704 "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
1705 "node kind: {}, old node range: {:?}, new node range: {:?}",
1706 ),
1707 depth,
1708 range.to_offset(old_buffer),
1709 range.to_offset(new_buffer),
1710 new_node.kind(),
1711 old_range,
1712 new_range,
1713 );
1714 }
1715
1716 if !new_node.has_changes() {
1717 assert_eq!(
1718 old_buffer
1719 .text_for_range(old_range.clone())
1720 .collect::<String>(),
1721 new_buffer
1722 .text_for_range(new_range.clone())
1723 .collect::<String>(),
1724 concat!(
1725 "mismatched text for node\n",
1726 "layer depth: {}, old layer range: {:?}, new layer range: {:?},\n",
1727 "node kind: {}, old node range:{:?}, new node range:{:?}",
1728 ),
1729 depth,
1730 range.to_offset(old_buffer),
1731 range.to_offset(new_buffer),
1732 new_node.kind(),
1733 old_range,
1734 new_range,
1735 );
1736 }
1737
1738 for i in 0..new_node.child_count() {
1739 check_node_edits(
1740 depth,
1741 range,
1742 old_node.child(i).unwrap(),
1743 new_node.child(i).unwrap(),
1744 old_buffer,
1745 new_buffer,
1746 edits,
1747 )
1748 }
1749 }
1750 }
1751
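    // Applies each marked edit step to the buffer, updates the syntax map
    // incrementally (interpolate + reparse), and asserts that the result
    // matches a syntax map built from scratch for the same text.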
1752 fn test_edit_sequence(steps: &[&str]) -> (Buffer, SyntaxMap) {
1753 let registry = Arc::new(LanguageRegistry::test());
1754 let language = Arc::new(rust_lang());
1755 registry.add(language.clone());
1756 let mut buffer = Buffer::new(0, 0, Default::default());
1757
1758 let mut mutated_syntax_map = SyntaxMap::new();
1759 mutated_syntax_map.set_language_registry(registry.clone());
1760 mutated_syntax_map.reparse(language.clone(), &buffer);
1761
1762 for (i, marked_string) in steps.into_iter().enumerate() {
1763 edit_buffer(&mut buffer, &marked_string.unindent());
1764
1765 // Reparse the syntax map
1766 mutated_syntax_map.interpolate(&buffer);
1767 mutated_syntax_map.reparse(language.clone(), &buffer);
1768
1769 // Create a second syntax map from scratch
1770 let mut reference_syntax_map = SyntaxMap::new();
1771 reference_syntax_map.set_language_registry(registry.clone());
1772 reference_syntax_map.reparse(language.clone(), &buffer);
1773
1774 // Compare the mutated syntax map to the new syntax map
1775 let mutated_layers = mutated_syntax_map.layers(&buffer);
1776 let reference_layers = reference_syntax_map.layers(&buffer);
1777 assert_eq!(
1778 mutated_layers.len(),
1779 reference_layers.len(),
1780 "wrong number of layers at step {i}"
1781 );
1782 for (edited_layer, reference_layer) in
1783 mutated_layers.into_iter().zip(reference_layers.into_iter())
1784 {
1785 assert_eq!(
1786 edited_layer.2.to_sexp(),
1787 reference_layer.2.to_sexp(),
1788 "different layer at step {i}"
1789 );
1790 assert_eq!(
1791 edited_layer.2.range(),
1792 reference_layer.2.range(),
1793 "different layer at step {i}"
1794 );
1795 }
1796 }
1797
1798 (buffer, mutated_syntax_map)
1799 }
1800
1801 fn rust_lang() -> Language {
1802 Language::new(
1803 LanguageConfig {
1804 name: "Rust".into(),
1805 path_suffixes: vec!["rs".to_string()],
1806 ..Default::default()
1807 },
1808 Some(tree_sitter_rust::language()),
1809 )
1810 .with_highlights_query(
1811 r#"
1812 (field_identifier) @field
1813 (struct_expression) @struct
1814 "#,
1815 )
1816 .unwrap()
1817 .with_injection_query(
1818 r#"
1819 (macro_invocation
1820 (token_tree) @content
1821 (#set! "language" "rust"))
1822 "#,
1823 )
1824 .unwrap()
1825 }
1826
1827 fn range_for_text(buffer: &Buffer, text: &str) -> Range<usize> {
1828 let start = buffer.as_rope().to_string().find(text).unwrap();
1829 start..start + text.len()
1830 }
1831
1832 fn assert_layers_for_range(
1833 syntax_map: &SyntaxMap,
1834 buffer: &BufferSnapshot,
1835 range: Range<Point>,
1836 expected_layers: &[&str],
1837 ) {
1838 let layers = syntax_map.layers_for_range(range, &buffer);
1839 assert_eq!(
1840 layers.len(),
1841 expected_layers.len(),
1842 "wrong number of layers"
1843 );
1844 for (i, ((_, _, node), expected_s_exp)) in
1845 layers.iter().zip(expected_layers.iter()).enumerate()
1846 {
1847 let actual_s_exp = node.to_sexp();
1848 assert!(
1849 string_contains_sequence(
1850 &actual_s_exp,
1851 &expected_s_exp.split("...").collect::<Vec<_>>()
1852 ),
1853 "layer {i}:\n\nexpected: {expected_s_exp}\nactual: {actual_s_exp}",
1854 );
1855 }
1856 }
1857
1858 fn assert_capture_ranges(
1859 syntax_map: &SyntaxMap,
1860 buffer: &BufferSnapshot,
1861 highlight_query_capture_names: &[&str],
1862 marked_string: &str,
1863 ) {
1864 let mut actual_ranges = Vec::<Range<usize>>::new();
1865 let captures = syntax_map.captures(0..buffer.len(), buffer, |grammar| {
1866 grammar.highlights_query.as_ref()
1867 });
1868 let queries = captures
1869 .grammars()
1870 .iter()
1871 .map(|grammar| grammar.highlights_query.as_ref().unwrap())
1872 .collect::<Vec<_>>();
1873 for capture in captures {
1874 let name = &queries[capture.grammar_index].capture_names()[capture.index as usize];
1875 if highlight_query_capture_names.contains(&name.as_str()) {
1876 actual_ranges.push(capture.node.byte_range());
1877 }
1878 }
1879
1880 let (text, expected_ranges) = marked_text_ranges(&marked_string.unindent(), false);
1881 assert_eq!(text, buffer.text());
1882 assert_eq!(actual_ranges, expected_ranges);
1883 }
1884
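    // `marked_string` uses the conventions of `util::test::marked_text_ranges`:
    // «...» marks inserted text and ˇ marks an empty insertion point; the
    // deleted old text is recovered by matching the unmarked portions against
    // the buffer's previous contents.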
1885 fn edit_buffer(buffer: &mut Buffer, marked_string: &str) {
1886 let old_text = buffer.text();
1887 let (new_text, mut ranges) = marked_text_ranges(marked_string, false);
1888 if ranges.is_empty() {
1889 ranges.push(0..new_text.len());
1890 }
1891
1892 assert_eq!(
1893 old_text[..ranges[0].start],
1894 new_text[..ranges[0].start],
1895 "invalid edit"
1896 );
1897
1898 let mut delta = 0;
1899 let mut edits = Vec::new();
1900 let mut ranges = ranges.into_iter().peekable();
1901
1902 while let Some(inserted_range) = ranges.next() {
1903 let new_start = inserted_range.start;
1904 let old_start = (new_start as isize - delta) as usize;
1905
1906 let following_text = if let Some(next_range) = ranges.peek() {
1907 &new_text[inserted_range.end..next_range.start]
1908 } else {
1909 &new_text[inserted_range.end..]
1910 };
1911
1912 let inserted_len = inserted_range.len();
1913 let deleted_len = old_text[old_start..]
1914 .find(following_text)
1915 .expect("invalid edit");
1916
1917 let old_range = old_start..old_start + deleted_len;
1918 edits.push((old_range, new_text[inserted_range].to_string()));
1919 delta += inserted_len as isize - deleted_len as isize;
1920 }
1921
1922 assert_eq!(
1923 old_text.len() as isize + delta,
1924 new_text.len() as isize,
1925 "invalid edit"
1926 );
1927
1928 buffer.edit(edits);
1929 }
1930
1931 pub fn string_contains_sequence(text: &str, parts: &[&str]) -> bool {
1932 let mut last_part_end = 0;
1933 for part in parts {
1934 if let Some(start_ix) = text[last_part_end..].find(part) {
1935 last_part_end += start_ix + part.len();
1936 } else {
1937 return false;
1938 }
1939 }
1940 true
1941 }
1942}