1use crate::diagnostic_set::DiagnosticEntry;
2pub use crate::{
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
6 PLAIN_TEXT,
7};
8use anyhow::{anyhow, Result};
9use clock::ReplicaId;
10use futures::FutureExt as _;
11use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
12use lazy_static::lazy_static;
13use lsp::LanguageServer;
14use parking_lot::Mutex;
15use postage::{prelude::Stream, sink::Sink, watch};
16use similar::{ChangeTag, TextDiff};
17use smol::future::yield_now;
18use std::{
19 any::Any,
20 cell::RefCell,
21 cmp,
22 collections::{BTreeMap, HashMap, HashSet},
23 ffi::OsString,
24 future::Future,
25 iter::{Iterator, Peekable},
26 ops::{Deref, DerefMut, Range},
27 path::{Path, PathBuf},
28 str,
29 sync::Arc,
30 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
31 vec,
32};
33use text::operation_queue::OperationQueue;
34pub use text::{Buffer as TextBuffer, Operation as _, *};
35use theme::SyntaxTheme;
36use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
37use util::{post_inc, TryFutureExt as _};
38
39#[cfg(any(test, feature = "test-support"))]
40pub use tree_sitter_rust;
41
42pub use lsp::DiagnosticSeverity;
43
// One reusable tree-sitter `Parser` per thread, so parsing avoids both
// repeated parser construction and cross-thread locking.
thread_local! {
    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}
47
lazy_static! {
    // Global pool of reusable tree-sitter `QueryCursor`s; presumably checked
    // in/out via `QueryCursorHandle` — its impl is not in this chunk.
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}
51
// TODO - Make this configurable
// Number of space columns added per indentation level by autoindent.
const INDENT_SIZE: u32 = 4;
54
/// A text buffer: the underlying CRDT text plus optional file, language,
/// syntax-tree, diagnostic, and language-server state layered on top.
pub struct Buffer {
    text: TextBuffer,
    // The file this buffer was loaded from, if any.
    file: Option<Box<dyn File>>,
    // Buffer version as of the most recent save (or initial load).
    saved_version: clock::Global,
    // File mtime as of the most recent save; `UNIX_EPOCH` when there is no file.
    saved_mtime: SystemTime,
    language: Option<Arc<Language>>,
    // Autoindent work queued by edits; processed when parsing finishes.
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    pending_autoindent: Option<Task<()>>,
    // How long `reparse` blocks waiting for the background parse before
    // falling back to fully asynchronous parsing.
    sync_parse_timeout: Duration,
    syntax_tree: Mutex<Option<SyntaxTree>>,
    parsing_in_background: bool,
    // Incremented each time a parse completes (see `did_finish_parsing`).
    parse_count: usize,
    diagnostics: DiagnosticSet,
    // Incremented each time `diagnostics` is replaced.
    diagnostics_update_count: usize,
    language_server: Option<LanguageServerState>,
    deferred_ops: OperationQueue<Operation>,
    #[cfg(test)]
    pub(crate) operations: Vec<Operation>,
}
74
/// An immutable view of a [`Buffer`]: its text plus the syntax tree,
/// diagnostics, and parse state captured at snapshot time.
pub struct BufferSnapshot {
    text: text::BufferSnapshot,
    tree: Option<Tree>,
    diagnostics: DiagnosticSet,
    diagnostics_update_count: usize,
    is_parsing: bool,
    language: Option<Arc<Language>>,
    parse_count: usize,
}
84
/// A single diagnostic (error, warning, …) attached to a range of the buffer.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Diagnostic {
    pub severity: DiagnosticSeverity,
    pub message: String,
    // Id shared by all diagnostics belonging to the same related group
    // (assigned in `Buffer::update_diagnostics`).
    pub group_id: usize,
    // Whether this entry is its group's primary diagnostic — the entry with
    // the highest severity in the group (see `Buffer::update_diagnostics`).
    pub is_primary: bool,
}
92
/// Per-buffer state used to keep a language server in sync with the buffer.
struct LanguageServerState {
    server: Arc<LanguageServer>,
    // Channel feeding `_maintain_server` with snapshots to forward to the server.
    latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
    // Snapshots keyed by document version; pruned once the server reports
    // diagnostics for a newer version (see `Buffer::update_diagnostics`).
    pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
    // Next document version to assign (see `update_language_server`).
    next_version: usize,
    // Background task that sends didOpen/didChange notifications to the server
    // (spawned in `set_language`). Held only to keep the task alive.
    _maintain_server: Task<Option<()>>,
}
100
/// A buffer snapshot paired with the LSP document version and file path it
/// was published under.
#[derive(Clone)]
struct LanguageServerSnapshot {
    buffer_snapshot: text::BufferSnapshot,
    version: usize,
    path: Arc<Path>,
}
107
/// An operation that can be applied to (and replicated between) buffers.
#[derive(Clone, Debug)]
pub enum Operation {
    // A text CRDT operation.
    Buffer(text::Operation),
    // Replaces the buffer's diagnostic set wholesale.
    UpdateDiagnostics {
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
}
116
/// Events emitted by a [`Buffer`] for observers.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Event {
    // The buffer's contents changed.
    Edited,
    // The buffer first diverged from its saved state.
    Dirtied,
    // A save completed.
    Saved,
    // The backing file's path/mtime/deleted state changed.
    FileHandleChanged,
    // The buffer was reloaded from disk.
    Reloaded,
    // A parse completed and the syntax tree was replaced.
    Reparsed,
    // The diagnostic set was replaced.
    DiagnosticsUpdated,
    // The buffer was closed.
    Closed,
}
128
/// Abstraction over the file backing a buffer.
pub trait File {
    /// Id of the worktree this file belongs to.
    fn worktree_id(&self) -> usize;

    /// Id of this file's entry within its worktree, if any.
    fn entry_id(&self) -> Option<usize>;

    /// Last-known modification time of the file.
    fn mtime(&self) -> SystemTime;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<Path>;

    /// Returns the absolute path of this file.
    fn abs_path(&self) -> Option<PathBuf>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self) -> PathBuf;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name(&self) -> Option<OsString>;

    /// Whether the file has been deleted from disk.
    fn is_deleted(&self) -> bool;

    /// Persists `text` as the file's new contents; resolves to the buffer
    /// version and mtime recorded by the save.
    fn save(
        &self,
        buffer_id: u64,
        text: Rope,
        version: clock::Global,
        cx: &mut MutableAppContext,
    ) -> Task<Result<String>>;

    /// Loads the file's current contents from local disk, if possible.
    fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;

    /// Notifies the file's owner that `operation` was applied to the buffer.
    fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);

    /// Notifies the file's owner that the buffer was removed/closed.
    fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);

    /// Clones this handle behind a fresh box.
    fn boxed_clone(&self) -> Box<dyn File>;

    /// Downcasting support for concrete `File` implementations.
    fn as_any(&self) -> &dyn Any;
}
170
171struct QueryCursorHandle(Option<QueryCursor>);
172
/// A parsed tree-sitter tree paired with the buffer version it reflects.
#[derive(Clone)]
struct SyntaxTree {
    tree: Tree,
    // Buffer version at which `tree` was last parsed or interpolated
    // (see `interpolate_tree`).
    version: clock::Global,
}
178
/// A pending request to recompute indentation after an edit.
#[derive(Clone)]
struct AutoindentRequest {
    // Snapshot of the buffer taken just before the edit was applied.
    before_edit: BufferSnapshot,
    // Anchors at the start of each edited range.
    edited: Vec<Anchor>,
    // Ranges of multi-line inserted text (populated when the inserted text
    // contains a newline; see `edit_internal`).
    inserted: Option<Vec<Range<Anchor>>>,
}
185
/// Suggestion for indenting one line: copy the indentation of `basis_row`,
/// plus one extra `INDENT_SIZE` level when `indent` is true
/// (see `compute_autoindents`).
#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    indent: bool,
}
191
192struct TextProvider<'a>(&'a Rope);
193
/// Syntax-highlighting state carried by `BufferChunks` when a grammar and
/// theme are available.
struct BufferChunkHighlights<'a> {
    captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
    next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
    // Stack of (end offset, highlight id) for captures currently in scope —
    // presumably; the iterator impl is not in this chunk.
    stack: Vec<(usize, HighlightId)>,
    highlight_map: HighlightMap,
    theme: &'a SyntaxTheme,
    // Keeps the pooled query cursor alive for as long as `captures` borrows it.
    _query_cursor: QueryCursorHandle,
}
202
/// Iterator over a buffer's text as [`Chunk`]s, annotating runs of text with
/// highlight styles and diagnostic severities.
pub struct BufferChunks<'a> {
    range: Range<usize>,
    chunks: rope::Chunks<'a>,
    diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
    // NOTE(review): these look like nesting depths of currently-active
    // diagnostics per severity — confirm against the iterator impl (not in view).
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    // Present only when highlighting is possible (grammar + theme).
    highlights: Option<BufferChunkHighlights<'a>>,
}
213
/// A run of buffer text that shares one highlight style and one diagnostic
/// severity, as yielded by [`BufferChunks`].
#[derive(Clone, Copy, Debug, Default)]
pub struct Chunk<'a> {
    pub text: &'a str,
    pub highlight_style: Option<HighlightStyle>,
    pub diagnostic: Option<DiagnosticSeverity>,
}
220
/// A line-level diff from the buffer contents at `base_version` to `new_text`,
/// produced by `Buffer::diff` and consumed by `Buffer::apply_diff`.
pub(crate) struct Diff {
    base_version: clock::Global,
    new_text: Arc<str>,
    // Runs of (change tag, byte length) covering `new_text`/the old text.
    changes: Vec<(ChangeTag, usize)>,
}
226
/// Marks the start or end offset of one diagnostic's range, used by
/// `BufferChunks` to toggle the active severity while iterating.
#[derive(Clone, Copy)]
struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    severity: DiagnosticSeverity,
}
233
234impl Buffer {
235 pub fn new<T: Into<Arc<str>>>(
236 replica_id: ReplicaId,
237 base_text: T,
238 cx: &mut ModelContext<Self>,
239 ) -> Self {
240 Self::build(
241 TextBuffer::new(
242 replica_id,
243 cx.model_id() as u64,
244 History::new(base_text.into()),
245 ),
246 None,
247 )
248 }
249
250 pub fn from_file<T: Into<Arc<str>>>(
251 replica_id: ReplicaId,
252 base_text: T,
253 file: Box<dyn File>,
254 cx: &mut ModelContext<Self>,
255 ) -> Self {
256 Self::build(
257 TextBuffer::new(
258 replica_id,
259 cx.model_id() as u64,
260 History::new(base_text.into()),
261 ),
262 Some(file),
263 )
264 }
265
266 pub fn from_proto(
267 replica_id: ReplicaId,
268 message: proto::Buffer,
269 file: Option<Box<dyn File>>,
270 cx: &mut ModelContext<Self>,
271 ) -> Result<Self> {
272 let mut buffer =
273 text::Buffer::new(replica_id, message.id, History::new(message.content.into()));
274 let ops = message
275 .history
276 .into_iter()
277 .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op)));
278 buffer.apply_ops(ops)?;
279 let mut this = Self::build(buffer, file);
280 this.apply_diagnostic_update(
281 Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
282 cx,
283 );
284
285 Ok(this)
286 }
287
288 pub fn to_proto(&self) -> proto::Buffer {
289 proto::Buffer {
290 id: self.remote_id(),
291 content: self.text.base_text().to_string(),
292 history: self
293 .text
294 .history()
295 .map(proto::serialize_edit_operation)
296 .collect(),
297 selections: Vec::new(),
298 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
299 }
300 }
301
    /// Builder-style variant of [`set_language`](Self::set_language);
    /// consumes and returns the buffer.
    pub fn with_language(
        mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        self.set_language(language, language_server, cx);
        self
    }
311
312 fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
313 let saved_mtime;
314 if let Some(file) = file.as_ref() {
315 saved_mtime = file.mtime();
316 } else {
317 saved_mtime = UNIX_EPOCH;
318 }
319
320 Self {
321 saved_mtime,
322 saved_version: buffer.version(),
323 text: buffer,
324 file,
325 syntax_tree: Mutex::new(None),
326 parsing_in_background: false,
327 parse_count: 0,
328 sync_parse_timeout: Duration::from_millis(1),
329 autoindent_requests: Default::default(),
330 pending_autoindent: Default::default(),
331 language: None,
332 diagnostics: Default::default(),
333 diagnostics_update_count: 0,
334 language_server: None,
335 deferred_ops: OperationQueue::new(),
336 #[cfg(test)]
337 operations: Default::default(),
338 }
339 }
340
341 pub fn snapshot(&self) -> BufferSnapshot {
342 BufferSnapshot {
343 text: self.text.snapshot(),
344 tree: self.syntax_tree(),
345 diagnostics: self.diagnostics.clone(),
346 diagnostics_update_count: self.diagnostics_update_count,
347 is_parsing: self.parsing_in_background,
348 language: self.language.clone(),
349 parse_count: self.parse_count,
350 }
351 }
352
    /// The file this buffer was loaded from, if any.
    pub fn file(&self) -> Option<&dyn File> {
        self.file.as_deref()
    }
356
357 pub fn save(
358 &mut self,
359 cx: &mut ModelContext<Self>,
360 ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
361 let file = self
362 .file
363 .as_ref()
364 .ok_or_else(|| anyhow!("buffer has no file"))?;
365 let text = self.as_rope().clone();
366 let version = self.version();
367 let save = file.save(self.remote_id(), text, version, cx.as_mut());
368 Ok(cx.spawn(|this, mut cx| async move {
369 let (version, mtime) = save.await?;
370 this.update(&mut cx, |this, cx| {
371 this.did_save(version.clone(), mtime, None, cx);
372 });
373 Ok((version, mtime))
374 }))
375 }
376
    /// Assigns the buffer's language and, optionally, a language server.
    ///
    /// When a server is given, a background task is spawned that consumes
    /// buffer snapshots from `latest_snapshot`: the first snapshot is sent to
    /// the server as `textDocument/didOpen` with the full text, and each
    /// subsequent one as an incremental `textDocument/didChange`. Setting the
    /// language also triggers a reparse and pushes a fresh snapshot to the
    /// server via `update_language_server`.
    pub fn set_language(
        &mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<lsp::LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.language = language;
        self.language_server = if let Some(server) = language_server {
            let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
            Some(LanguageServerState {
                latest_snapshot: latest_snapshot_tx,
                pending_snapshots: Default::default(),
                next_version: 0,
                server: server.clone(),
                _maintain_server: cx.background().spawn(
                    async move {
                        let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
                        while let Some(snapshot) = latest_snapshot_rx.recv().await {
                            if let Some(snapshot) = snapshot {
                                let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
                                if let Some(prev_snapshot) = prev_snapshot {
                                    // Translate the edits made since the previous
                                    // snapshot into LSP content-change events,
                                    // using UTF-16 coordinates as LSP requires.
                                    let changes = lsp::DidChangeTextDocumentParams {
                                        text_document: lsp::VersionedTextDocumentIdentifier::new(
                                            uri,
                                            snapshot.version as i32,
                                        ),
                                        content_changes: snapshot
                                            .buffer_snapshot
                                            .edits_since::<(PointUtf16, usize)>(
                                                prev_snapshot.buffer_snapshot.version(),
                                            )
                                            .map(|edit| {
                                                // Old range expressed in the new
                                                // snapshot's coordinate space.
                                                let edit_start = edit.new.start.0;
                                                let edit_end = edit_start
                                                    + (edit.old.end.0 - edit.old.start.0);
                                                let new_text = snapshot
                                                    .buffer_snapshot
                                                    .text_for_range(
                                                        edit.new.start.1..edit.new.end.1,
                                                    )
                                                    .collect();
                                                lsp::TextDocumentContentChangeEvent {
                                                    range: Some(lsp::Range::new(
                                                        lsp::Position::new(
                                                            edit_start.row,
                                                            edit_start.column,
                                                        ),
                                                        lsp::Position::new(
                                                            edit_end.row,
                                                            edit_end.column,
                                                        ),
                                                    )),
                                                    range_length: None,
                                                    text: new_text,
                                                }
                                            })
                                            .collect(),
                                    };
                                    server
                                        .notify::<lsp::notification::DidChangeTextDocument>(changes)
                                        .await?;
                                } else {
                                    // First snapshot: open the document on the
                                    // server with its full text.
                                    server
                                        .notify::<lsp::notification::DidOpenTextDocument>(
                                            lsp::DidOpenTextDocumentParams {
                                                text_document: lsp::TextDocumentItem::new(
                                                    uri,
                                                    Default::default(),
                                                    snapshot.version as i32,
                                                    snapshot.buffer_snapshot.text().to_string(),
                                                ),
                                            },
                                        )
                                        .await?;
                                }

                                prev_snapshot = Some(snapshot);
                            }
                        }
                        Ok(())
                    }
                    .log_err(),
                ),
            })
        } else {
            None
        };

        self.reparse(cx);
        self.update_language_server();
    }
468
    /// Records a completed save: updates the saved version and mtime,
    /// optionally swaps in a new file handle, notifies the language server
    /// with `textDocument/didSave`, and emits [`Event::Saved`].
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: SystemTime,
        new_file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.saved_mtime = mtime;
        self.saved_version = version;
        if let Some(new_file) = new_file {
            self.file = Some(new_file);
        }
        if let Some(state) = &self.language_server {
            // Fire-and-forget didSave notification. NOTE(review): the unwraps
            // assume a buffer with a language server always has a file with a
            // valid absolute path at save time — confirm with callers.
            cx.background()
                .spawn(
                    state
                        .server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: lsp::TextDocumentIdentifier {
                                    uri: lsp::Url::from_file_path(
                                        self.file.as_ref().unwrap().abs_path().unwrap(),
                                    )
                                    .unwrap(),
                                },
                                text: None,
                            },
                        ),
                )
                .detach()
        }
        cx.emit(Event::Saved);
    }
502
    /// Responds to the backing file changing out from under the buffer.
    ///
    /// Emits [`Event::Dirtied`] when a clean buffer's file is deleted, and —
    /// when the file's mtime changed while the buffer is clean — returns a
    /// task that reloads the contents by diffing against the on-disk text, so
    /// a dirty buffer is never clobbered. Emits [`Event::FileHandleChanged`]
    /// whenever the path, mtime, or deleted state changed. Returns `None`
    /// when the buffer had no file or no reload is needed.
    pub fn file_updated(
        &mut self,
        new_file: Box<dyn File>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<()>> {
        let old_file = self.file.as_ref()?;
        let mut file_changed = false;
        let mut task = None;

        if new_file.path() != old_file.path() {
            file_changed = true;
        }

        if new_file.is_deleted() {
            // Deletion makes a clean buffer effectively dirty (its contents
            // no longer exist on disk).
            if !old_file.is_deleted() {
                file_changed = true;
                if !self.is_dirty() {
                    cx.emit(Event::Dirtied);
                }
            }
        } else {
            let new_mtime = new_file.mtime();
            if new_mtime != old_file.mtime() {
                file_changed = true;

                // Only auto-reload clean buffers; unsaved edits win otherwise.
                if !self.is_dirty() {
                    task = Some(cx.spawn(|this, mut cx| {
                        async move {
                            let new_text = this.read_with(&cx, |this, cx| {
                                this.file.as_ref().and_then(|file| file.load_local(cx))
                            });
                            if let Some(new_text) = new_text {
                                let new_text = new_text.await?;
                                // Apply the on-disk contents as a minimal diff
                                // rather than replacing the whole text.
                                let diff = this
                                    .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
                                    .await;
                                this.update(&mut cx, |this, cx| {
                                    if this.apply_diff(diff, cx) {
                                        this.saved_version = this.version();
                                        this.saved_mtime = new_mtime;
                                        cx.emit(Event::Reloaded);
                                    }
                                });
                            }
                            Ok(())
                        }
                        .log_err()
                        .map(drop)
                    }));
                }
            }
        }

        if file_changed {
            cx.emit(Event::FileHandleChanged);
        }
        self.file = Some(new_file);
        task
    }
562
    /// Announces that the buffer is being closed by emitting [`Event::Closed`].
    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }
566
    /// The language currently assigned to this buffer, if any.
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }
570
    /// Number of parses that have completed; observers can compare values to
    /// detect that the syntax tree has changed.
    pub fn parse_count(&self) -> usize {
        self.parse_count
    }
574
    /// Number of times the diagnostic set has been replaced; observers can
    /// compare values to detect diagnostic changes.
    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }
578
579 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
580 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
581 self.interpolate_tree(syntax_tree);
582 Some(syntax_tree.tree.clone())
583 } else {
584 None
585 }
586 }
587
    /// Whether a background parse is currently in flight (test-only probe).
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.parsing_in_background
    }
592
    /// Overrides how long `reparse` blocks before going asynchronous (test-only).
    #[cfg(test)]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }
597
    /// Reparses the buffer if it has a grammar and no parse is already running.
    ///
    /// The parse runs on a background thread. If it completes within
    /// `sync_parse_timeout`, the new tree is installed synchronously and
    /// `true` is returned; otherwise the parse continues asynchronously,
    /// installs its result when done, and reparses again if the buffer or
    /// grammar changed in the meantime.
    fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
        if self.parsing_in_background {
            return false;
        }

        if let Some(grammar) = self.grammar().cloned() {
            let old_tree = self.syntax_tree();
            let text = self.as_rope().clone();
            // Remember what version this parse reflects, so staleness can be
            // detected when it finishes.
            let parsed_version = self.version();
            let parse_task = cx.background().spawn({
                let grammar = grammar.clone();
                async move { Self::parse_text(&text, old_tree, &grammar) }
            });

            match cx
                .background()
                .block_with_timeout(self.sync_parse_timeout, parse_task)
            {
                Ok(new_tree) => {
                    self.did_finish_parsing(new_tree, parsed_version, cx);
                    return true;
                }
                Err(parse_task) => {
                    self.parsing_in_background = true;
                    cx.spawn(move |this, mut cx| async move {
                        let new_tree = parse_task.await;
                        this.update(&mut cx, move |this, cx| {
                            // Reparse if the grammar was swapped or more edits
                            // arrived while this parse was running.
                            let grammar_changed = this
                                .grammar()
                                .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
                            let parse_again = this.version.gt(&parsed_version) || grammar_changed;
                            this.parsing_in_background = false;
                            this.did_finish_parsing(new_tree, parsed_version, cx);

                            if parse_again && this.reparse(cx) {
                                return;
                            }
                        });
                    })
                    .detach();
                }
            }
        }
        false
    }
643
644 fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
645 PARSER.with(|parser| {
646 let mut parser = parser.borrow_mut();
647 parser
648 .set_language(grammar.ts_language)
649 .expect("incompatible grammar");
650 let mut chunks = text.chunks_in_range(0..text.len());
651 let tree = parser
652 .parse_with(
653 &mut move |offset, _| {
654 chunks.seek(offset);
655 chunks.next().unwrap_or("").as_bytes()
656 },
657 old_tree.as_ref(),
658 )
659 .unwrap();
660 tree
661 })
662 }
663
    /// Brings `tree` up to date with all edits made since `tree.version` by
    /// applying the corresponding tree-sitter `InputEdit`s, then stamps the
    /// tree with the buffer's current version.
    fn interpolate_tree(&self, tree: &mut SyntaxTree) {
        for edit in self.edits_since::<(usize, Point)>(&tree.version) {
            let (bytes, lines) = edit.flatten();
            // The old range is expressed in the new coordinate space, as
            // tree-sitter expects for successive edits.
            tree.tree.edit(&InputEdit {
                start_byte: bytes.new.start,
                old_end_byte: bytes.new.start + bytes.old.len(),
                new_end_byte: bytes.new.end,
                start_position: lines.new.start.to_ts_point(),
                old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
                    .to_ts_point(),
                new_end_position: lines.new.end.to_ts_point(),
            });
        }
        tree.version = self.version();
    }
679
    /// Installs a freshly parsed syntax tree for `version`, bumps the parse
    /// counter, kicks off pending autoindent work, and notifies observers.
    fn did_finish_parsing(
        &mut self,
        tree: Tree,
        version: clock::Global,
        cx: &mut ModelContext<Self>,
    ) {
        self.parse_count += 1;
        *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
        // Autoindent depends on the syntax tree, so run it now.
        self.request_autoindent(cx);
        cx.emit(Event::Reparsed);
        cx.notify();
    }
692
    /// Replaces the buffer's diagnostics with a fresh set from the language
    /// server, returning the replicated `UpdateDiagnostics` operation.
    ///
    /// `version` is the LSP document version the diagnostics were computed
    /// against; when present, positions are resolved against the pending
    /// snapshot for that version rather than the live buffer. Diagnostics
    /// sharing related ranges are grouped, and each group's highest-severity
    /// entry is marked primary. Diagnostics from "disk-based" sources are
    /// translated through the edits made since the last save (they reflect
    /// the file on disk, not the live buffer).
    ///
    /// # Errors
    /// Fails when `version` refers to a snapshot that is no longer pending.
    pub fn update_diagnostics(
        &mut self,
        version: Option<i32>,
        mut diagnostics: Vec<lsp::Diagnostic>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Operation> {
        diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));

        let version = version.map(|version| version as usize);
        // Choose the text the diagnostic positions refer to: the pending
        // snapshot for `version`, or the live buffer.
        let content = if let Some(version) = version {
            let language_server = self.language_server.as_mut().unwrap();
            let snapshot = language_server
                .pending_snapshots
                .get(&version)
                .ok_or_else(|| anyhow!("missing snapshot"))?;
            &snapshot.buffer_snapshot
        } else {
            self.deref()
        };
        let abs_path = self.file.as_ref().and_then(|f| f.abs_path());

        let empty_set = HashSet::new();
        let disk_based_sources = self
            .language
            .as_ref()
            .and_then(|language| language.disk_based_diagnostic_sources())
            .unwrap_or(&empty_set);

        let mut edits_since_save = content
            .edits_since::<PointUtf16>(&self.saved_version)
            .peekable();
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        let mut group_ids_by_diagnostic_range = HashMap::new();
        let mut diagnostics_by_group_id = HashMap::new();
        let mut next_group_id = 0;
        'outer: for diagnostic in &diagnostics {
            let mut start = diagnostic.range.start.to_point_utf16();
            let mut end = diagnostic.range.end.to_point_utf16();
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref();
            // Reuse the group id of any diagnostic sharing one of this
            // diagnostic's related ranges; otherwise start a new group and
            // register all of its ranges.
            let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
                .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
                .copied()
                .unwrap_or_else(|| {
                    let group_id = post_inc(&mut next_group_id);
                    for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
                        group_ids_by_diagnostic_range.insert((source, code, range), group_id);
                    }
                    group_id
                });

            if diagnostic
                .source
                .as_ref()
                .map_or(false, |source| disk_based_sources.contains(source))
            {
                // Disk-based diagnostics describe the saved file; map their
                // positions forward through edits made since the save, and
                // drop any diagnostic whose range was itself edited.
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                start = last_edit_new_end + (start - last_edit_old_end);
                end = last_edit_new_end + (end - last_edit_old_end);
            }

            // Clip to valid positions, then widen empty ranges to cover at
            // least one character so they remain visible.
            let mut range = content.clip_point_utf16(start, Bias::Left)
                ..content.clip_point_utf16(end, Bias::Right);
            if range.start == range.end {
                range.end.column += 1;
                range.end = content.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = content.clip_point_utf16(range.start, Bias::Left);
                }
            }

            diagnostics_by_group_id
                .entry(group_id)
                .or_insert(Vec::new())
                .push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: false,
                    },
                });
        }

        drop(edits_since_save);
        // Mark each group's most severe entry as the primary diagnostic.
        let new_diagnostics = DiagnosticSet::new(
            diagnostics_by_group_id
                .into_values()
                .flat_map(|mut diagnostics| {
                    let primary = diagnostics
                        .iter_mut()
                        .min_by_key(|entry| entry.diagnostic.severity)
                        .unwrap();
                    primary.diagnostic.is_primary = true;
                    diagnostics
                }),
            content,
        );
        self.diagnostics = new_diagnostics;

        // Snapshots older than the acknowledged version are no longer needed.
        if let Some(version) = version {
            let language_server = self.language_server.as_mut().unwrap();
            let versions_to_delete = language_server
                .pending_snapshots
                .range(..version)
                .map(|(v, _)| *v)
                .collect::<Vec<_>>();
            for version in versions_to_delete {
                language_server.pending_snapshots.remove(&version);
            }
        }

        self.diagnostics_update_count += 1;
        cx.notify();
        cx.emit(Event::DiagnosticsUpdated);
        Ok(Operation::UpdateDiagnostics {
            diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
            lamport_timestamp: self.lamport_timestamp(),
        })
    }
827
    /// Computes and applies pending autoindent suggestions.
    ///
    /// The computation runs in the background; if it finishes within 500µs it
    /// is applied synchronously, otherwise it is stored in
    /// `pending_autoindent` and applied when the task completes.
    fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(indent_columns) = self.compute_autoindents() {
            let indent_columns = cx.background().spawn(indent_columns);
            match cx
                .background()
                .block_with_timeout(Duration::from_micros(500), indent_columns)
            {
                Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
                Err(indent_columns) => {
                    self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
                        let indent_columns = indent_columns.await;
                        this.update(&mut cx, |this, cx| {
                            this.apply_autoindents(indent_columns, cx);
                        });
                    }));
                }
            }
        }
    }
847
    /// Builds a future that computes the desired indentation (row → column)
    /// for every line affected by the queued autoindent requests.
    ///
    /// Returns `None` when there is no language, no syntax tree, or no
    /// pending request. The future yields periodically (every
    /// `max_rows_between_yields` rows) so long computations stay cooperative.
    fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.language.is_none()
            || snapshot.tree.is_none()
            || self.autoindent_requests.is_empty()
        {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_columns = BTreeMap::new();
            for request in autoindent_requests {
                // Map each edited row from its position before the edit to
                // its position after, via the request's anchors.
                let old_to_new_rows = request
                    .edited
                    .iter()
                    .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
                    .zip(
                        request
                            .edited
                            .iter()
                            .map(|anchor| anchor.summary::<Point>(&snapshot).row),
                    )
                    .collect::<BTreeMap<u32, u32>>();

                // First pass: what indentation would the pre-edit buffer have
                // suggested for each edited line?
                let mut old_suggestions = HashMap::<u32, u32>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        let indentation_basis = old_to_new_rows
                            .get(&suggestion.basis_row)
                            .and_then(|from_row| old_suggestions.get(from_row).copied())
                            .unwrap_or_else(|| {
                                request
                                    .before_edit
                                    .indent_column_for_line(suggestion.basis_row)
                            });
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        old_suggestions.insert(
                            *old_to_new_rows.get(&old_row).unwrap(),
                            indentation_basis + delta,
                        );
                    }
                    yield_now().await;
                }

                // At this point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the
                // buffer before the edit, but keyed by the row for these lines after the edits were applied.

                // Second pass: compute suggestions against the current buffer
                // and record only the rows whose indentation actually changed.
                let new_edited_row_ranges =
                    contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
                for new_edited_row_range in new_edited_row_ranges {
                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        let new_indentation = indent_columns
                            .get(&suggestion.basis_row)
                            .copied()
                            .unwrap_or_else(|| {
                                snapshot.indent_column_for_line(suggestion.basis_row)
                            })
                            + delta;
                        if old_suggestions
                            .get(&new_row)
                            .map_or(true, |old_indentation| new_indentation != *old_indentation)
                        {
                            indent_columns.insert(new_row, new_indentation);
                        }
                    }
                    yield_now().await;
                }

                // Newly inserted multi-line text always gets fresh indentation
                // (there is no pre-edit suggestion to compare against).
                if let Some(inserted) = request.inserted.as_ref() {
                    let inserted_row_ranges = contiguous_ranges(
                        inserted
                            .iter()
                            .map(|range| range.to_point(&snapshot))
                            .flat_map(|range| range.start.row..range.end.row + 1),
                        max_rows_between_yields,
                    );
                    for inserted_row_range in inserted_row_ranges {
                        let suggestions = snapshot
                            .suggest_autoindents(inserted_row_range.clone())
                            .into_iter()
                            .flatten();
                        for (row, suggestion) in inserted_row_range.zip(suggestions) {
                            let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                            let new_indentation = indent_columns
                                .get(&suggestion.basis_row)
                                .copied()
                                .unwrap_or_else(|| {
                                    snapshot.indent_column_for_line(suggestion.basis_row)
                                })
                                + delta;
                            indent_columns.insert(row, new_indentation);
                        }
                        yield_now().await;
                    }
                }
            }
            indent_columns
        })
    }
960
961 fn apply_autoindents(
962 &mut self,
963 indent_columns: BTreeMap<u32, u32>,
964 cx: &mut ModelContext<Self>,
965 ) {
966 self.start_transaction();
967 for (row, indent_column) in &indent_columns {
968 self.set_indent_column_for_line(*row, *indent_column, cx);
969 }
970 self.end_transaction(cx);
971 }
972
973 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
974 let current_column = self.indent_column_for_line(row);
975 if column > current_column {
976 let offset = Point::new(row, 0).to_offset(&*self);
977 self.edit(
978 [offset..offset],
979 " ".repeat((column - current_column) as usize),
980 cx,
981 );
982 } else if column < current_column {
983 self.edit(
984 [Point::new(row, 0)..Point::new(row, current_column - column)],
985 "",
986 cx,
987 );
988 }
989 }
990
991 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
992 // TODO: it would be nice to not allocate here.
993 let old_text = self.text();
994 let base_version = self.version();
995 cx.background().spawn(async move {
996 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
997 .iter_all_changes()
998 .map(|c| (c.tag(), c.value().len()))
999 .collect::<Vec<_>>();
1000 Diff {
1001 base_version,
1002 new_text,
1003 changes,
1004 }
1005 })
1006 }
1007
    /// Applies a [`Diff`] produced by [`diff`](Self::diff) as a single
    /// transaction, provided the buffer hasn't changed since the diff's base
    /// version. Returns whether the diff was applied.
    pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
        if self.version == diff.base_version {
            self.start_transaction();
            // Walk the change runs, tracking the current offset in the text.
            let mut offset = 0;
            for (tag, len) in diff.changes {
                let range = offset..(offset + len);
                match tag {
                    ChangeTag::Equal => offset += len,
                    ChangeTag::Delete => self.edit(Some(range), "", cx),
                    ChangeTag::Insert => {
                        self.edit(Some(offset..offset), &diff.new_text[range], cx);
                        offset += len;
                    }
                }
            }
            self.end_transaction(cx);
            true
        } else {
            // The buffer changed underneath the diff; applying would clobber edits.
            false
        }
    }
1029
    /// Whether the buffer has unsaved changes: its version has advanced past
    /// the saved version, or its backing file was deleted.
    pub fn is_dirty(&self) -> bool {
        !self.saved_version.ge(&self.version)
            || self.file.as_ref().map_or(false, |file| file.is_deleted())
    }
1034
    /// Whether the buffer has unsaved changes AND the file changed on disk
    /// since the last save (i.e. saving would overwrite someone else's edits).
    pub fn has_conflict(&self) -> bool {
        !self.saved_version.ge(&self.version)
            && self
                .file
                .as_ref()
                .map_or(false, |file| file.mtime() > self.saved_mtime)
    }
1042
    /// Subscribes to the underlying text buffer's change notifications.
    pub fn subscribe(&mut self) -> Subscription {
        self.text.subscribe()
    }
1046
    /// Begins a transaction timestamped with the current instant.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }
1050
    /// Begins a transaction timestamped with `now` (exposed for tests/internal use).
    pub(crate) fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.text.start_transaction_at(now)
    }
1054
    /// Ends the current transaction, timestamped with the current instant.
    pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        self.end_transaction_at(Instant::now(), cx)
    }
1058
1059 pub(crate) fn end_transaction_at(
1060 &mut self,
1061 now: Instant,
1062 cx: &mut ModelContext<Self>,
1063 ) -> Option<TransactionId> {
1064 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1065 let was_dirty = start_version != self.saved_version;
1066 self.did_edit(&start_version, was_dirty, cx);
1067 Some(transaction_id)
1068 } else {
1069 None
1070 }
1071 }
1072
    /// Captures a new versioned snapshot of the buffer and queues it for
    /// delivery to the language server (consumed by the `_maintain_server`
    /// task spawned in `set_language`). No-op without a language server.
    fn update_language_server(&mut self) {
        let language_server = if let Some(language_server) = self.language_server.as_mut() {
            language_server
        } else {
            return;
        };
        // NOTE(review): a file-less buffer falls back to the path "/";
        // presumably such buffers never reach a server that cares — confirm.
        let abs_path = self
            .file
            .as_ref()
            .map_or(Path::new("/").to_path_buf(), |file| {
                file.abs_path().unwrap()
            });

        let version = post_inc(&mut language_server.next_version);
        let snapshot = LanguageServerSnapshot {
            buffer_snapshot: self.text.snapshot(),
            version,
            path: Arc::from(abs_path),
        };
        // Keep the snapshot around so later diagnostics can be resolved
        // against the exact text the server saw.
        language_server
            .pending_snapshots
            .insert(version, snapshot.clone());
        let _ = language_server
            .latest_snapshot
            .blocking_send(Some(snapshot));
    }
1099
    /// Replaces each of `ranges_iter` with `new_text`, without autoindent.
    pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
    where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, false, cx)
    }
1108
    /// Replaces each of `ranges_iter` with `new_text` and queues autoindent
    /// for the affected lines.
    pub fn edit_with_autoindent<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, true, cx)
    }
1121
    /// Core edit implementation: replaces each range with `new_text` inside a
    /// transaction, optionally recording an [`AutoindentRequest`], and
    /// broadcasts the resulting operation to replicas.
    pub fn edit_internal<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        autoindent: bool,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        let new_text = new_text.into();

        // Skip invalid ranges and coalesce contiguous ones.
        let mut ranges: Vec<Range<usize>> = Vec::new();
        for range in ranges_iter {
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            // An empty range with empty text is a no-op; drop it.
            if !new_text.is_empty() || !range.is_empty() {
                if let Some(prev_range) = ranges.last_mut() {
                    if prev_range.end >= range.start {
                        prev_range.end = cmp::max(prev_range.end, range.end);
                    } else {
                        ranges.push(range);
                    }
                } else {
                    ranges.push(range);
                }
            }
        }
        if ranges.is_empty() {
            return;
        }

        self.start_transaction();
        // Any in-flight autoindent is now stale.
        self.pending_autoindent.take();
        let autoindent_request = if autoindent && self.language.is_some() {
            let before_edit = self.snapshot();
            let edited = ranges
                .iter()
                .filter_map(|range| {
                    let start = range.start.to_point(self);
                    // A newline typed at the end of a line re-indents the new
                    // line, not the one being left.
                    if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
                        None
                    } else {
                        Some(self.anchor_before(range.start))
                    }
                })
                .collect();
            Some((before_edit, edited))
        } else {
            None
        };

        let first_newline_ix = new_text.find('\n');
        let new_text_len = new_text.len();

        let edit = self.text.edit(ranges.iter().cloned(), new_text);

        if let Some((before_edit, edited)) = autoindent_request {
            let mut inserted = None;
            // Multi-line insertions get their own inserted-range anchors so
            // every inserted line can be re-indented.
            if let Some(first_newline_ix) = first_newline_ix {
                // `delta` tracks how much earlier replacements have shifted
                // subsequent offsets.
                let mut delta = 0isize;
                inserted = Some(
                    ranges
                        .iter()
                        .map(|range| {
                            let start =
                                (delta + range.start as isize) as usize + first_newline_ix + 1;
                            let end = (delta + range.start as isize) as usize + new_text_len;
                            delta +=
                                (range.end as isize - range.start as isize) + new_text_len as isize;
                            self.anchor_before(start)..self.anchor_after(end)
                        })
                        .collect(),
                );
            }

            self.autoindent_requests.push(Arc::new(AutoindentRequest {
                before_edit,
                edited,
                inserted,
            }));
        }

        self.end_transaction(cx);
        self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
    }
1209
1210 fn did_edit(
1211 &mut self,
1212 old_version: &clock::Global,
1213 was_dirty: bool,
1214 cx: &mut ModelContext<Self>,
1215 ) {
1216 if self.edits_since::<usize>(old_version).next().is_none() {
1217 return;
1218 }
1219
1220 self.reparse(cx);
1221 self.update_language_server();
1222
1223 cx.emit(Event::Edited);
1224 if !was_dirty {
1225 cx.emit(Event::Dirtied);
1226 }
1227 cx.notify();
1228 }
1229
1230 fn grammar(&self) -> Option<&Arc<Grammar>> {
1231 self.language.as_ref().and_then(|l| l.grammar.as_ref())
1232 }
1233
1234 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1235 &mut self,
1236 ops: I,
1237 cx: &mut ModelContext<Self>,
1238 ) -> Result<()> {
1239 self.pending_autoindent.take();
1240 let was_dirty = self.is_dirty();
1241 let old_version = self.version.clone();
1242 let mut deferred_ops = Vec::new();
1243 let buffer_ops = ops
1244 .into_iter()
1245 .filter_map(|op| match op {
1246 Operation::Buffer(op) => Some(op),
1247 _ => {
1248 if self.can_apply_op(&op) {
1249 self.apply_op(op, cx);
1250 } else {
1251 deferred_ops.push(op);
1252 }
1253 None
1254 }
1255 })
1256 .collect::<Vec<_>>();
1257 self.text.apply_ops(buffer_ops)?;
1258 self.flush_deferred_ops(cx);
1259 self.did_edit(&old_version, was_dirty, cx);
1260 // Notify independently of whether the buffer was edited as the operations could include a
1261 // selection update.
1262 cx.notify();
1263 Ok(())
1264 }
1265
1266 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1267 let mut deferred_ops = Vec::new();
1268 for op in self.deferred_ops.drain().iter().cloned() {
1269 if self.can_apply_op(&op) {
1270 self.apply_op(op, cx);
1271 } else {
1272 deferred_ops.push(op);
1273 }
1274 }
1275 self.deferred_ops.insert(deferred_ops);
1276 }
1277
1278 fn can_apply_op(&self, operation: &Operation) -> bool {
1279 match operation {
1280 Operation::Buffer(_) => {
1281 unreachable!("buffer operations should never be applied at this layer")
1282 }
1283 Operation::UpdateDiagnostics { diagnostics, .. } => {
1284 diagnostics.iter().all(|diagnostic| {
1285 self.text.can_resolve(&diagnostic.range.start)
1286 && self.text.can_resolve(&diagnostic.range.end)
1287 })
1288 }
1289 }
1290 }
1291
1292 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1293 match operation {
1294 Operation::Buffer(_) => {
1295 unreachable!("buffer operations should never be applied at this layer")
1296 }
1297 Operation::UpdateDiagnostics { diagnostics, .. } => {
1298 self.apply_diagnostic_update(diagnostics, cx);
1299 }
1300 }
1301 }
1302
    /// Replaces the buffer's diagnostic set with `diagnostics` (which must
    /// already be sorted, per `DiagnosticSet::from_sorted_entries`), bumps the
    /// update counter so observers can detect the change, and notifies.
    fn apply_diagnostic_update(
        &mut self,
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        cx: &mut ModelContext<Self>,
    ) {
        self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
        self.diagnostics_update_count += 1;
        cx.notify();
    }
1312
1313 #[cfg(not(test))]
1314 pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1315 if let Some(file) = &self.file {
1316 file.buffer_updated(self.remote_id(), operation, cx.as_mut());
1317 }
1318 }
1319
    /// Test builds record operations locally instead of broadcasting them, so
    /// assertions can inspect what would have been sent.
    #[cfg(test)]
    pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
        self.operations.push(operation);
    }
1324
    /// Removes the per-replica state held by the underlying text buffer for
    /// `replica_id` and notifies observers.
    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
        self.text.remove_peer(replica_id);
        cx.notify();
    }
1329
1330 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1331 let was_dirty = self.is_dirty();
1332 let old_version = self.version.clone();
1333
1334 if let Some((transaction_id, operation)) = self.text.undo() {
1335 self.send_operation(Operation::Buffer(operation), cx);
1336 self.did_edit(&old_version, was_dirty, cx);
1337 Some(transaction_id)
1338 } else {
1339 None
1340 }
1341 }
1342
1343 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1344 let was_dirty = self.is_dirty();
1345 let old_version = self.version.clone();
1346
1347 if let Some((transaction_id, operation)) = self.text.redo() {
1348 self.send_operation(Operation::Buffer(operation), cx);
1349 self.did_edit(&old_version, was_dirty, cx);
1350 Some(transaction_id)
1351 } else {
1352 None
1353 }
1354 }
1355}
1356
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Test helper: delegates to the text buffer's `randomly_edit`, wrapping
    /// the random edit in a transaction so it can be undone as a unit.
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        old_range_count: usize,
        cx: &mut ModelContext<Self>,
    ) where
        T: rand::Rng,
    {
        self.start_transaction();
        self.text.randomly_edit(rng, old_range_count);
        self.end_transaction(cx);
    }
}
1372
1373impl Entity for Buffer {
1374 type Event = Event;
1375
1376 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
1377 if let Some(file) = self.file.as_ref() {
1378 file.buffer_removed(self.remote_id(), cx);
1379 }
1380 }
1381}
1382
// Lets `Buffer` transparently expose the underlying `TextBuffer` API.
impl Deref for Buffer {
    type Target = TextBuffer;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
1390
impl BufferSnapshot {
    /// Computes an indentation suggestion for each row in `row_range`, based
    /// on the language's tree-sitter `indents` query.
    ///
    /// Returns `None` when the buffer has no grammar or no parse tree.
    fn suggest_autoindents<'a>(
        &'a self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
        let mut query_cursor = QueryCursorHandle::new();
        if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
            let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

            // Get the "indentation ranges" that intersect this row range.
            let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
            let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
            query_cursor.set_point_range(
                Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
                    ..Point::new(row_range.end, 0).to_ts_point(),
            );
            let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
            for mat in query_cursor.matches(
                &grammar.indents_query,
                tree.root_node(),
                TextProvider(self.as_rope()),
            ) {
                // An `@indent` capture supplies both the start and the default
                // end of an indentation range; an `@end` capture overrides the
                // end with its own start position.
                let mut node_kind = "";
                let mut start: Option<Point> = None;
                let mut end: Option<Point> = None;
                for capture in mat.captures {
                    if Some(capture.index) == indent_capture_ix {
                        node_kind = capture.node.kind();
                        start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                        end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                    } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position().into()));
                    }
                }

                if let Some((start, end)) = start.zip(end) {
                    // Single-line ranges cannot influence indentation.
                    if start.row == end.row {
                        continue;
                    }

                    // Keep `indentation_ranges` sorted by start point, merging
                    // ranges that share the same start.
                    let range = start..end;
                    match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
                        Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
                        Ok(ix) => {
                            let prev_range = &mut indentation_ranges[ix];
                            prev_range.0.end = prev_range.0.end.max(range.end);
                        }
                    }
                }
            }

            let mut prev_row = prev_non_blank_row.unwrap_or(0);
            Some(row_range.map(move |row| {
                let row_start = Point::new(row, self.indent_column_for_line(row));

                // Decide whether this row should indent relative to the
                // previous row, outdent back to an enclosing range's start
                // row, or keep the previous row's level.
                let mut indent_from_prev_row = false;
                let mut outdent_to_row = u32::MAX;
                for (range, _node_kind) in &indentation_ranges {
                    if range.start.row >= row {
                        break;
                    }

                    if range.start.row == prev_row && range.end > row_start {
                        indent_from_prev_row = true;
                    }
                    if range.end.row >= prev_row && range.end <= row_start {
                        outdent_to_row = outdent_to_row.min(range.start.row);
                    }
                }

                let suggestion = if outdent_to_row == prev_row {
                    // A range ends exactly at the previous row: stay at that
                    // row's level without indenting further.
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                } else if indent_from_prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: true,
                    }
                } else if outdent_to_row < prev_row {
                    IndentSuggestion {
                        basis_row: outdent_to_row,
                        indent: false,
                    }
                } else {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                };

                prev_row = row;
                suggestion
            }))
        } else {
            None
        }
    }

    /// Scans upward from `row` for the nearest preceding line that contains
    /// non-whitespace. Returns `None` when every line above is blank.
    fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
        while row > 0 {
            row -= 1;
            if !self.is_line_blank(row) {
                return Some(row);
            }
        }
        None
    }

    /// Returns an iterator over the text in `range`, split into chunks that
    /// are uniform with respect to syntax highlighting and diagnostic
    /// severity.
    ///
    /// NOTE(review): both syntax highlights and diagnostic endpoints are only
    /// collected when `theme` is `Some`; callers that pass no theme receive
    /// chunks without diagnostic annotations — confirm this is intended.
    pub fn chunks<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
        theme: Option<&'a SyntaxTheme>,
    ) -> BufferChunks<'a> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);

        let mut highlights = None;
        let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
        if let Some(theme) = theme {
            // Flatten each overlapping diagnostic into a start and an end
            // endpoint so the iterator can maintain per-severity depths.
            for entry in self
                .diagnostics
                .range::<_, usize>(range.clone(), self, true)
            {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                });
            }
            // Sort by offset; at equal offsets, starts sort before ends.
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));

            if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
                let mut query_cursor = QueryCursorHandle::new();

                // TODO - add a Tree-sitter API to remove the need for this.
                // SAFETY(review): the transmute extends the cursor borrow to
                // 'static; the cursor is kept alive for the iterator's
                // lifetime via the `_query_cursor` field below — confirm no
                // other alias of the cursor exists while `captures` is live.
                let cursor = unsafe {
                    std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
                };
                let captures = cursor.set_byte_range(range.clone()).captures(
                    &grammar.highlights_query,
                    tree.root_node(),
                    TextProvider(self.text.as_rope()),
                );
                highlights = Some(BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_map: grammar.highlight_map(),
                    _query_cursor: query_cursor,
                    theme,
                })
            }
        }

        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
        let chunks = self.text.as_rope().chunks_in_range(range.clone());

        BufferChunks {
            range,
            chunks,
            diagnostic_endpoints,
            // Depth counters start at zero: not inside any diagnostic.
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            highlights,
        }
    }

    /// The language configured for this snapshot, if any.
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    /// The tree-sitter grammar of the snapshot's language, if any.
    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language
            .as_ref()
            .and_then(|language| language.grammar.as_ref())
    }

    /// Finds the byte range of the smallest syntax node that contains `range`
    /// and whose range differs from it — i.e. the next candidate for
    /// selection expansion. Returns `None` without a parse tree.
    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
        if let Some(tree) = self.tree.as_ref() {
            let root = tree.root_node();
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            let mut node = root.descendant_for_byte_range(range.start, range.end);
            // Walk up past ancestors whose range equals the input range, so
            // repeated calls keep expanding.
            while node.map_or(false, |n| n.byte_range() == range) {
                node = node.unwrap().parent();
            }
            node.map(|n| n.byte_range())
        } else {
            None
        }
    }

    /// Finds the innermost pair of brackets (per the language's `brackets`
    /// query) whose open/close nodes surround `range`, returning the byte
    /// ranges of the open and close brackets.
    pub fn enclosing_bracket_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<usize>, Range<usize>)> {
        let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;

        // Find bracket pairs that *inclusively* contain the given range.
        // NOTE(review): `+ 1` can extend past the buffer's last offset when
        // the range ends at the end of the buffer — assumed to be tolerated
        // by the query cursor; confirm.
        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
        let mut cursor = QueryCursorHandle::new();
        let matches = cursor.set_byte_range(range).matches(
            &grammar.brackets_query,
            tree.root_node(),
            TextProvider(self.as_rope()),
        );

        // Get the ranges of the innermost pair of brackets.
        // "Innermost" = smallest span from open start to close end.
        matches
            .filter_map(|mat| {
                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
                Some((open.byte_range(), close.byte_range()))
            })
            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
    }

    /// Iterates over the diagnostics intersecting `search_range`, converting
    /// their anchors to type `O`.
    pub fn diagnostics_in_range<'a, T, O>(
        &'a self,
        search_range: Range<T>,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        T: 'a + ToOffset,
        O: 'a + FromAnchor,
    {
        self.diagnostics.range(search_range, self, true)
    }

    /// Iterates over all diagnostics belonging to the given group (e.g. a
    /// primary diagnostic plus its related information).
    pub fn diagnostic_group<'a, O>(
        &'a self,
        group_id: usize,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics.group(group_id, self)
    }

    /// Number of diagnostics updates applied so far; lets observers detect
    /// changes cheaply by comparing counters.
    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    /// Number of reparses completed so far; lets observers detect new syntax
    /// trees by comparing counters.
    pub fn parse_count(&self) -> usize {
        self.parse_count
    }
}
1647
1648impl Clone for BufferSnapshot {
1649 fn clone(&self) -> Self {
1650 Self {
1651 text: self.text.clone(),
1652 tree: self.tree.clone(),
1653 diagnostics: self.diagnostics.clone(),
1654 diagnostics_update_count: self.diagnostics_update_count,
1655 is_parsing: self.is_parsing,
1656 language: self.language.clone(),
1657 parse_count: self.parse_count,
1658 }
1659 }
1660}
1661
// Lets `BufferSnapshot` transparently expose the underlying text snapshot's
// API.
impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
1669
impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    /// Supplies the text of `node` to tree-sitter as a sequence of byte
    /// chunks sliced directly out of the rope, without copying.
    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        ByteChunks(self.0.chunks_in_range(node.byte_range()))
    }
}
1677
/// Adapts a rope chunk iterator (yielding `&str`) into the byte-slice
/// iterator shape that tree-sitter's `TextProvider` API expects.
struct ByteChunks<'a>(rope::Chunks<'a>);

impl<'a> Iterator for ByteChunks<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(str::as_bytes)
    }
}
1687
// SAFETY: NOTE(review) — `BufferChunks` holds tree-sitter cursor/capture
// state containing raw pointers that is not automatically `Send`. This
// assertion relies on those pointers never being aliased from another thread
// while the iterator is in use — confirm against the tree-sitter bindings.
unsafe impl<'a> Send for BufferChunks<'a> {}
1689
impl<'a> BufferChunks<'a> {
    /// Repositions the iterator to begin at `offset`.
    ///
    /// Re-seeks the underlying rope chunks, prunes highlight-stack entries
    /// that end at or before the new offset, consumes a pending capture if
    /// the seek lands inside it, and narrows the capture query accordingly.
    pub fn seek(&mut self, offset: usize) {
        self.range.start = offset;
        self.chunks.seek(self.range.start);
        if let Some(highlights) = self.highlights.as_mut() {
            // Drop highlights that ended before the new position.
            highlights
                .stack
                .retain(|(end_offset, _)| *end_offset > offset);
            if let Some((mat, capture_ix)) = &highlights.next_capture {
                let capture = mat.captures[*capture_ix as usize];
                if offset >= capture.node.start_byte() {
                    // The new position is at or past the pending capture's
                    // start: push its highlight if it is still active, then
                    // discard the capture.
                    let next_capture_end = capture.node.end_byte();
                    if offset < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_map.get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            }
            highlights.captures.set_byte_range(self.range.clone());
        }
    }

    /// The iterator's current byte offset within the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    /// Adjusts the per-severity nesting depth as a diagnostic start/end
    /// endpoint is crossed. Severities other than the four tracked ones are
    /// ignored.
    ///
    /// NOTE(review): an end endpoint without a matching start would underflow
    /// the depth counter — assumed impossible because endpoints come in
    /// sorted start/end pairs from `chunks`.
    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }
    }

    /// The most severe diagnostic level currently covering the iterator's
    /// position, if any (ERROR > WARNING > INFORMATION > HINT).
    fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }
}
1748
impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    /// Yields the next chunk of text, cut at every boundary where either the
    /// active syntax highlight or the active diagnostic severity changes.
    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Pop highlights that ended at or before the current position.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Consume captures that have already started, pushing their
            // highlights; stop at the first capture that starts ahead of us —
            // its start bounds the current chunk.
            while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
                let capture = mat.captures[*capture_ix as usize];
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id = highlights.highlight_map.get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Consume diagnostic endpoints we have already passed, updating the
        // severity depths; the next pending endpoint bounds the chunk.
        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
            if endpoint.offset <= self.range.start {
                self.update_diagnostic_depths(endpoint);
                self.diagnostic_endpoints.next();
            } else {
                next_diagnostic_endpoint = endpoint.offset;
                break;
            }
        }

        if let Some(chunk) = self.chunks.peek() {
            let chunk_start = self.range.start;
            // The chunk ends at the nearest of: end of the rope chunk, next
            // capture start, next diagnostic endpoint, or the end of the
            // innermost active highlight.
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_style = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
                    highlight_style = parent_highlight_id.style(highlights.theme);
                }
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            self.range.start = chunk_end;
            // Advance the rope iterator only when this rope chunk is spent.
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                highlight_style,
                diagnostic: self.current_diagnostic_severity(),
            })
        } else {
            None
        }
    }
}
1824
1825impl QueryCursorHandle {
1826 fn new() -> Self {
1827 QueryCursorHandle(Some(
1828 QUERY_CURSORS
1829 .lock()
1830 .pop()
1831 .unwrap_or_else(|| QueryCursor::new()),
1832 ))
1833 }
1834}
1835
impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        // Invariant: the inner Option is only `None` after `drop` has taken
        // the cursor, so this unwrap cannot fail during normal use.
        self.0.as_ref().unwrap()
    }
}
1843
impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // Invariant: the inner Option is only `None` after `drop` has taken
        // the cursor, so this unwrap cannot fail during normal use.
        self.0.as_mut().unwrap()
    }
}
1849
1850impl Drop for QueryCursorHandle {
1851 fn drop(&mut self) {
1852 let mut cursor = self.0.take().unwrap();
1853 cursor.set_byte_range(0..usize::MAX);
1854 cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
1855 QUERY_CURSORS.lock().push(cursor)
1856 }
1857}
1858
/// Conversion between the editor's row/column `Point` type and tree-sitter's
/// `Point` type.
trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}
1863
impl ToTreeSitterPoint for Point {
    fn to_ts_point(self) -> tree_sitter::Point {
        // `u32 as usize` is lossless on all supported targets.
        tree_sitter::Point::new(self.row as usize, self.column as usize)
    }

    fn from_ts_point(point: tree_sitter::Point) -> Self {
        // NOTE(review): `usize as u32` truncates if tree-sitter ever reports
        // coordinates above `u32::MAX` — assumed unreachable for editor
        // buffers; confirm.
        Point::new(point.row as u32, point.column as u32)
    }
}
1873
/// Conversion from LSP positions (zero-based line + UTF-16 character) to the
/// editor's `PointUtf16` type.
trait ToPointUtf16 {
    fn to_point_utf16(self) -> PointUtf16;
}
1877
impl ToPointUtf16 for lsp::Position {
    fn to_point_utf16(self) -> PointUtf16 {
        // LSP `line`/`character` map directly onto row/column; `character` is
        // measured in UTF-16 code units, matching `PointUtf16`.
        PointUtf16::new(self.line, self.character)
    }
}
1883
1884impl operation_queue::Operation for Operation {
1885 fn lamport_timestamp(&self) -> clock::Lamport {
1886 match self {
1887 Operation::Buffer(_) => {
1888 unreachable!("buffer operations should never be deferred at this layer")
1889 }
1890 Operation::UpdateDiagnostics {
1891 lamport_timestamp, ..
1892 } => *lamport_timestamp,
1893 }
1894 }
1895}
1896
1897fn diagnostic_ranges<'a>(
1898 diagnostic: &'a lsp::Diagnostic,
1899 abs_path: Option<&'a Path>,
1900) -> impl 'a + Iterator<Item = Range<PointUtf16>> {
1901 diagnostic
1902 .related_information
1903 .iter()
1904 .flatten()
1905 .filter_map(move |info| {
1906 if info.location.uri.to_file_path().ok()? == abs_path? {
1907 let info_start = PointUtf16::new(
1908 info.location.range.start.line,
1909 info.location.range.start.character,
1910 );
1911 let info_end = PointUtf16::new(
1912 info.location.range.end.line,
1913 info.location.range.end.character,
1914 );
1915 Some(info_start..info_end)
1916 } else {
1917 None
1918 }
1919 })
1920 .chain(Some(
1921 diagnostic.range.start.to_point_utf16()..diagnostic.range.end.to_point_utf16(),
1922 ))
1923}
1924
/// Groups an ascending sequence of `u32` values into contiguous half-open
/// ranges, capping each range at `max_len` values.
///
/// For example `[1, 2, 3, 5]` with a large `max_len` yields `1..4` then
/// `5..6`; any non-adjacent value (or a range reaching `max_len`) starts a
/// new range.
pub fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    // `values` is already an iterator; the previous `.into_iter()` call here
    // was a redundant identity conversion (clippy: useless_conversion).
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            // Extend the current range when the next value is adjacent and
            // the range has not yet reached its maximum length.
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            // Otherwise emit the finished range (if any) and start a new one.
            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            // Input exhausted: emit the final range, then `None` forever.
            return current_range.take();
        }
    })
}