1use crate::diagnostic_set::DiagnosticEntry;
2pub use crate::{
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
6 PLAIN_TEXT,
7};
8use anyhow::{anyhow, Result};
9use clock::ReplicaId;
10use futures::FutureExt as _;
11use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
12use lazy_static::lazy_static;
13use lsp::LanguageServer;
14use parking_lot::Mutex;
15use postage::{prelude::Stream, sink::Sink, watch};
16use similar::{ChangeTag, TextDiff};
17use smol::future::yield_now;
18use std::{
19 any::Any,
20 cell::RefCell,
21 cmp,
22 collections::{BTreeMap, HashMap, HashSet},
23 ffi::OsString,
24 future::Future,
25 iter::{Iterator, Peekable},
26 ops::{Deref, DerefMut, Range},
27 path::{Path, PathBuf},
28 str,
29 sync::Arc,
30 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
31 vec,
32};
33use text::operation_queue::OperationQueue;
34pub use text::{Buffer as TextBuffer, Operation as _, *};
35use theme::SyntaxTheme;
36use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
37use util::{post_inc, TryFutureExt as _};
38
39#[cfg(any(test, feature = "test-support"))]
40pub use tree_sitter_rust;
41
42pub use lsp::DiagnosticSeverity;
43
44thread_local! {
45 static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
46}
47
48lazy_static! {
49 static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
50}
51
52// TODO - Make this configurable
53const INDENT_SIZE: u32 = 4;
54
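/// An in-memory buffer of text together with the state derived from it: the file it
/// was loaded from, its syntax tree, diagnostics, pending autoindent requests, and
/// the connection to a language server, if any.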
55pub struct Buffer {
56 text: TextBuffer,
57 file: Option<Box<dyn File>>,
58 saved_version: clock::Global,
59 saved_mtime: SystemTime,
60 language: Option<Arc<Language>>,
61 autoindent_requests: Vec<Arc<AutoindentRequest>>,
62 pending_autoindent: Option<Task<()>>,
63 sync_parse_timeout: Duration,
64 syntax_tree: Mutex<Option<SyntaxTree>>,
65 parsing_in_background: bool,
66 parse_count: usize,
67 diagnostics: DiagnosticSet,
68 diagnostics_update_count: usize,
69 language_server: Option<LanguageServerState>,
70 deferred_ops: OperationQueue<Operation>,
71 #[cfg(test)]
72 pub(crate) operations: Vec<Operation>,
73}
74
75pub struct BufferSnapshot {
76 text: text::BufferSnapshot,
77 tree: Option<Tree>,
78 diagnostics: DiagnosticSet,
79 diagnostics_update_count: usize,
80 is_parsing: bool,
81 language: Option<Arc<Language>>,
82 parse_count: usize,
83}
84
85#[derive(Clone, Debug, PartialEq, Eq)]
86pub struct Diagnostic {
87 pub severity: DiagnosticSeverity,
88 pub message: String,
89 pub group_id: usize,
90 pub is_primary: bool,
91}
92
93struct LanguageServerState {
94 server: Arc<LanguageServer>,
95 latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
96 pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
97 next_version: usize,
98 _maintain_server: Task<Option<()>>,
99}
100
101#[derive(Clone)]
102struct LanguageServerSnapshot {
103 buffer_snapshot: text::BufferSnapshot,
104 version: usize,
105 path: Arc<Path>,
106}
107
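/// An operation that can be replicated to other replicas of this buffer: either a
/// plain text operation or a diagnostics update.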
108#[derive(Clone, Debug)]
109pub enum Operation {
110 Buffer(text::Operation),
111 UpdateDiagnostics {
112 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
113 lamport_timestamp: clock::Lamport,
114 },
115}
116
117#[derive(Clone, Debug, Eq, PartialEq)]
118pub enum Event {
119 Edited,
120 Dirtied,
121 Saved,
122 FileHandleChanged,
123 Reloaded,
124 Reparsed,
125 DiagnosticsUpdated,
126 Closed,
127}
128
129pub trait File {
130 fn worktree_id(&self) -> usize;
131
132 fn entry_id(&self) -> Option<usize>;
133
134 fn mtime(&self) -> SystemTime;
135
136 /// Returns the path of this file relative to the worktree's root directory.
137 fn path(&self) -> &Arc<Path>;
138
139 /// Returns the absolute path of this file.
140 fn abs_path(&self) -> Option<PathBuf>;
141
    /// Returns the path of this file relative to the worktree's parent directory
    /// (i.e., the returned path includes the name of the worktree's root folder).
144 fn full_path(&self) -> PathBuf;
145
146 /// Returns the last component of this handle's absolute path. If this handle refers to the root
147 /// of its worktree, then this method will return the name of the worktree itself.
148 fn file_name(&self) -> Option<OsString>;
149
150 fn is_deleted(&self) -> bool;
151
152 fn save(
153 &self,
154 buffer_id: u64,
155 text: Rope,
156 version: clock::Global,
157 cx: &mut MutableAppContext,
158 ) -> Task<Result<(clock::Global, SystemTime)>>;
159
160 fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;
161
162 fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);
163
164 fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);
165
166 fn boxed_clone(&self) -> Box<dyn File>;
167
168 fn as_any(&self) -> &dyn Any;
169}
170
171struct QueryCursorHandle(Option<QueryCursor>);
172
173#[derive(Clone)]
174struct SyntaxTree {
175 tree: Tree,
176 version: clock::Global,
177}
178
179#[derive(Clone)]
180struct AutoindentRequest {
181 selection_set_ids: HashSet<SelectionSetId>,
182 before_edit: BufferSnapshot,
183 edited: Vec<Anchor>,
184 inserted: Option<Vec<Range<Anchor>>>,
185}
186
187#[derive(Debug)]
188struct IndentSuggestion {
189 basis_row: u32,
190 indent: bool,
191}
192
193struct TextProvider<'a>(&'a Rope);
194
195struct BufferChunkHighlights<'a> {
196 captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
197 next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
198 stack: Vec<(usize, HighlightId)>,
199 highlight_map: HighlightMap,
200 theme: &'a SyntaxTheme,
201 _query_cursor: QueryCursorHandle,
202}
203
204pub struct BufferChunks<'a> {
205 range: Range<usize>,
206 chunks: rope::Chunks<'a>,
207 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
208 error_depth: usize,
209 warning_depth: usize,
210 information_depth: usize,
211 hint_depth: usize,
212 highlights: Option<BufferChunkHighlights<'a>>,
213}
214
215#[derive(Clone, Copy, Debug, Default)]
216pub struct Chunk<'a> {
217 pub text: &'a str,
218 pub highlight_style: Option<HighlightStyle>,
219 pub diagnostic: Option<DiagnosticSeverity>,
220}
221
222pub(crate) struct Diff {
223 base_version: clock::Global,
224 new_text: Arc<str>,
225 changes: Vec<(ChangeTag, usize)>,
226}
227
228#[derive(Clone, Copy)]
229struct DiagnosticEndpoint {
230 offset: usize,
231 is_start: bool,
232 severity: DiagnosticSeverity,
233}
234
235impl Buffer {
236 pub fn new<T: Into<Arc<str>>>(
237 replica_id: ReplicaId,
238 base_text: T,
239 cx: &mut ModelContext<Self>,
240 ) -> Self {
241 Self::build(
242 TextBuffer::new(
243 replica_id,
244 cx.model_id() as u64,
245 History::new(base_text.into()),
246 ),
247 None,
248 )
249 }
250
251 pub fn from_file<T: Into<Arc<str>>>(
252 replica_id: ReplicaId,
253 base_text: T,
254 file: Box<dyn File>,
255 cx: &mut ModelContext<Self>,
256 ) -> Self {
257 Self::build(
258 TextBuffer::new(
259 replica_id,
260 cx.model_id() as u64,
261 History::new(base_text.into()),
262 ),
263 Some(file),
264 )
265 }
266
267 pub fn from_proto(
268 replica_id: ReplicaId,
269 message: proto::Buffer,
270 file: Option<Box<dyn File>>,
271 cx: &mut ModelContext<Self>,
272 ) -> Result<Self> {
273 let mut buffer =
274 text::Buffer::new(replica_id, message.id, History::new(message.content.into()));
275 let ops = message
276 .history
277 .into_iter()
278 .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op)));
279 buffer.apply_ops(ops)?;
280 for set in message.selections {
281 let set = proto::deserialize_selection_set(set);
282 buffer.add_raw_selection_set(set.id, set);
283 }
284 let mut this = Self::build(buffer, file);
285 this.apply_diagnostic_update(
286 Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
287 cx,
288 );
289
290 Ok(this)
291 }
292
293 pub fn to_proto(&self) -> proto::Buffer {
294 proto::Buffer {
295 id: self.remote_id(),
296 content: self.text.base_text().to_string(),
297 history: self
298 .text
299 .history()
300 .map(proto::serialize_edit_operation)
301 .collect(),
302 selections: self
303 .selection_sets()
304 .map(|(_, set)| proto::serialize_selection_set(set))
305 .collect(),
306 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
307 }
308 }
309
310 pub fn with_language(
311 mut self,
312 language: Option<Arc<Language>>,
313 language_server: Option<Arc<LanguageServer>>,
314 cx: &mut ModelContext<Self>,
315 ) -> Self {
316 self.set_language(language, language_server, cx);
317 self
318 }
319
320 fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
        let saved_mtime = if let Some(file) = file.as_ref() {
            file.mtime()
        } else {
            UNIX_EPOCH
        };
327
328 Self {
329 saved_mtime,
330 saved_version: buffer.version(),
331 text: buffer,
332 file,
333 syntax_tree: Mutex::new(None),
334 parsing_in_background: false,
335 parse_count: 0,
336 sync_parse_timeout: Duration::from_millis(1),
337 autoindent_requests: Default::default(),
338 pending_autoindent: Default::default(),
339 language: None,
340 diagnostics: Default::default(),
341 diagnostics_update_count: 0,
342 language_server: None,
343 deferred_ops: OperationQueue::new(),
344 #[cfg(test)]
345 operations: Default::default(),
346 }
347 }
348
349 pub fn snapshot(&self) -> BufferSnapshot {
350 BufferSnapshot {
351 text: self.text.snapshot(),
352 tree: self.syntax_tree(),
353 diagnostics: self.diagnostics.clone(),
354 diagnostics_update_count: self.diagnostics_update_count,
355 is_parsing: self.parsing_in_background,
356 language: self.language.clone(),
357 parse_count: self.parse_count,
358 }
359 }
360
361 pub fn file(&self) -> Option<&dyn File> {
362 self.file.as_deref()
363 }
364
365 pub fn save(
366 &mut self,
367 cx: &mut ModelContext<Self>,
368 ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
369 let file = self
370 .file
371 .as_ref()
372 .ok_or_else(|| anyhow!("buffer has no file"))?;
373 let text = self.as_rope().clone();
374 let version = self.version();
375 let save = file.save(self.remote_id(), text, version, cx.as_mut());
376 Ok(cx.spawn(|this, mut cx| async move {
377 let (version, mtime) = save.await?;
378 this.update(&mut cx, |this, cx| {
379 this.did_save(version.clone(), mtime, None, cx);
380 });
381 Ok((version, mtime))
382 }))
383 }
384
385 pub fn set_language(
386 &mut self,
387 language: Option<Arc<Language>>,
388 language_server: Option<Arc<lsp::LanguageServer>>,
389 cx: &mut ModelContext<Self>,
390 ) {
391 self.language = language;
392 self.language_server = if let Some(server) = language_server {
393 let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
394 Some(LanguageServerState {
395 latest_snapshot: latest_snapshot_tx,
396 pending_snapshots: Default::default(),
397 next_version: 0,
398 server: server.clone(),
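                // This background task forwards buffer snapshots to the language server:
                // the first snapshot is sent as a `textDocument/didOpen` notification and
                // each subsequent one as an incremental `textDocument/didChange`.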
399 _maintain_server: cx.background().spawn(
400 async move {
401 let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
402 while let Some(snapshot) = latest_snapshot_rx.recv().await {
403 if let Some(snapshot) = snapshot {
404 let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
405 if let Some(prev_snapshot) = prev_snapshot {
406 let changes = lsp::DidChangeTextDocumentParams {
407 text_document: lsp::VersionedTextDocumentIdentifier::new(
408 uri,
409 snapshot.version as i32,
410 ),
411 content_changes: snapshot
412 .buffer_snapshot
413 .edits_since::<(PointUtf16, usize)>(
414 prev_snapshot.buffer_snapshot.version(),
415 )
416 .map(|edit| {
417 let edit_start = edit.new.start.0;
418 let edit_end = edit_start
419 + (edit.old.end.0 - edit.old.start.0);
420 let new_text = snapshot
421 .buffer_snapshot
422 .text_for_range(
423 edit.new.start.1..edit.new.end.1,
424 )
425 .collect();
426 lsp::TextDocumentContentChangeEvent {
427 range: Some(lsp::Range::new(
428 lsp::Position::new(
429 edit_start.row,
430 edit_start.column,
431 ),
432 lsp::Position::new(
433 edit_end.row,
434 edit_end.column,
435 ),
436 )),
437 range_length: None,
438 text: new_text,
439 }
440 })
441 .collect(),
442 };
443 server
444 .notify::<lsp::notification::DidChangeTextDocument>(changes)
445 .await?;
446 } else {
447 server
448 .notify::<lsp::notification::DidOpenTextDocument>(
449 lsp::DidOpenTextDocumentParams {
450 text_document: lsp::TextDocumentItem::new(
451 uri,
452 Default::default(),
453 snapshot.version as i32,
454 snapshot.buffer_snapshot.text().to_string(),
455 ),
456 },
457 )
458 .await?;
459 }
460
461 prev_snapshot = Some(snapshot);
462 }
463 }
464 Ok(())
465 }
466 .log_err(),
467 ),
468 })
469 } else {
470 None
471 };
472
473 self.reparse(cx);
474 self.update_language_server();
475 }
476
477 pub fn did_save(
478 &mut self,
479 version: clock::Global,
480 mtime: SystemTime,
481 new_file: Option<Box<dyn File>>,
482 cx: &mut ModelContext<Self>,
483 ) {
484 self.saved_mtime = mtime;
485 self.saved_version = version;
486 if let Some(new_file) = new_file {
487 self.file = Some(new_file);
488 }
489 if let Some(state) = &self.language_server {
490 cx.background()
491 .spawn(
492 state
493 .server
494 .notify::<lsp::notification::DidSaveTextDocument>(
495 lsp::DidSaveTextDocumentParams {
496 text_document: lsp::TextDocumentIdentifier {
497 uri: lsp::Url::from_file_path(
498 self.file.as_ref().unwrap().abs_path().unwrap(),
499 )
500 .unwrap(),
501 },
502 text: None,
503 },
504 ),
505 )
506 .detach()
507 }
508 cx.emit(Event::Saved);
509 }
510
511 pub fn file_updated(
512 &mut self,
513 new_file: Box<dyn File>,
514 cx: &mut ModelContext<Self>,
515 ) -> Option<Task<()>> {
516 let old_file = self.file.as_ref()?;
517 let mut file_changed = false;
518 let mut task = None;
519
520 if new_file.path() != old_file.path() {
521 file_changed = true;
522 }
523
524 if new_file.is_deleted() {
525 if !old_file.is_deleted() {
526 file_changed = true;
527 if !self.is_dirty() {
528 cx.emit(Event::Dirtied);
529 }
530 }
531 } else {
532 let new_mtime = new_file.mtime();
533 if new_mtime != old_file.mtime() {
534 file_changed = true;
535
536 if !self.is_dirty() {
537 task = Some(cx.spawn(|this, mut cx| {
538 async move {
539 let new_text = this.read_with(&cx, |this, cx| {
540 this.file.as_ref().and_then(|file| file.load_local(cx))
541 });
542 if let Some(new_text) = new_text {
543 let new_text = new_text.await?;
544 let diff = this
545 .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
546 .await;
547 this.update(&mut cx, |this, cx| {
548 if this.apply_diff(diff, cx) {
549 this.saved_version = this.version();
550 this.saved_mtime = new_mtime;
551 cx.emit(Event::Reloaded);
552 }
553 });
554 }
555 Ok(())
556 }
557 .log_err()
558 .map(drop)
559 }));
560 }
561 }
562 }
563
564 if file_changed {
565 cx.emit(Event::FileHandleChanged);
566 }
567 self.file = Some(new_file);
568 task
569 }
570
571 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
572 cx.emit(Event::Closed);
573 }
574
575 pub fn language(&self) -> Option<&Arc<Language>> {
576 self.language.as_ref()
577 }
578
579 pub fn parse_count(&self) -> usize {
580 self.parse_count
581 }
582
583 pub fn diagnostics_update_count(&self) -> usize {
584 self.diagnostics_update_count
585 }
586
587 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
588 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
589 self.interpolate_tree(syntax_tree);
590 Some(syntax_tree.tree.clone())
591 } else {
592 None
593 }
594 }
595
596 #[cfg(any(test, feature = "test-support"))]
597 pub fn is_parsing(&self) -> bool {
598 self.parsing_in_background
599 }
600
601 #[cfg(test)]
602 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
603 self.sync_parse_timeout = timeout;
604 }
605
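    /// Reparse the buffer's syntax tree, attempting to finish synchronously within
    /// `sync_parse_timeout`. If parsing takes longer, it continues on a background
    /// task, and the buffer is reparsed again once that task completes if the text
    /// or the grammar changed in the meantime. Returns whether the tree was updated
    /// synchronously.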
606 fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
607 if self.parsing_in_background {
608 return false;
609 }
610
611 if let Some(grammar) = self.grammar().cloned() {
612 let old_tree = self.syntax_tree();
613 let text = self.as_rope().clone();
614 let parsed_version = self.version();
615 let parse_task = cx.background().spawn({
616 let grammar = grammar.clone();
617 async move { Self::parse_text(&text, old_tree, &grammar) }
618 });
619
620 match cx
621 .background()
622 .block_with_timeout(self.sync_parse_timeout, parse_task)
623 {
624 Ok(new_tree) => {
625 self.did_finish_parsing(new_tree, parsed_version, cx);
626 return true;
627 }
628 Err(parse_task) => {
629 self.parsing_in_background = true;
630 cx.spawn(move |this, mut cx| async move {
631 let new_tree = parse_task.await;
632 this.update(&mut cx, move |this, cx| {
633 let grammar_changed = this
634 .grammar()
635 .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
636 let parse_again = this.version.gt(&parsed_version) || grammar_changed;
637 this.parsing_in_background = false;
638 this.did_finish_parsing(new_tree, parsed_version, cx);
639
640 if parse_again && this.reparse(cx) {
641 return;
642 }
643 });
644 })
645 .detach();
646 }
647 }
648 }
649 false
650 }
651
652 fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
653 PARSER.with(|parser| {
654 let mut parser = parser.borrow_mut();
655 parser
656 .set_language(grammar.ts_language)
657 .expect("incompatible grammar");
658 let mut chunks = text.chunks_in_range(0..text.len());
659 let tree = parser
660 .parse_with(
661 &mut move |offset, _| {
662 chunks.seek(offset);
663 chunks.next().unwrap_or("").as_bytes()
664 },
665 old_tree.as_ref(),
666 )
667 .unwrap();
668 tree
669 })
670 }
671
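    /// Apply all edits made since `tree.version` to the old syntax tree, so that its
    /// node positions stay aligned with the current text until a reparse completes.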
672 fn interpolate_tree(&self, tree: &mut SyntaxTree) {
673 for edit in self.edits_since::<(usize, Point)>(&tree.version) {
674 let (bytes, lines) = edit.flatten();
675 tree.tree.edit(&InputEdit {
676 start_byte: bytes.new.start,
677 old_end_byte: bytes.new.start + bytes.old.len(),
678 new_end_byte: bytes.new.end,
679 start_position: lines.new.start.to_ts_point(),
680 old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
681 .to_ts_point(),
682 new_end_position: lines.new.end.to_ts_point(),
683 });
684 }
685 tree.version = self.version();
686 }
687
688 fn did_finish_parsing(
689 &mut self,
690 tree: Tree,
691 version: clock::Global,
692 cx: &mut ModelContext<Self>,
693 ) {
694 self.parse_count += 1;
695 *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
696 self.request_autoindent(cx);
697 cx.emit(Event::Reparsed);
698 cx.notify();
699 }
700
701 pub fn update_diagnostics(
702 &mut self,
703 version: Option<i32>,
704 mut diagnostics: Vec<lsp::Diagnostic>,
705 cx: &mut ModelContext<Self>,
706 ) -> Result<Operation> {
707 diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
708
709 let version = version.map(|version| version as usize);
710 let content = if let Some(version) = version {
711 let language_server = self.language_server.as_mut().unwrap();
712 let snapshot = language_server
713 .pending_snapshots
714 .get(&version)
715 .ok_or_else(|| anyhow!("missing snapshot"))?;
716 &snapshot.buffer_snapshot
717 } else {
718 self.deref()
719 };
720 let abs_path = self.file.as_ref().and_then(|f| f.abs_path());
721
722 let empty_set = HashSet::new();
723 let disk_based_sources = self
724 .language
725 .as_ref()
726 .and_then(|language| language.disk_based_diagnostic_sources())
727 .unwrap_or(&empty_set);
728
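        // Diagnostics from disk-based sources (e.g. a compiler run against the saved
        // file) refer to positions in the file as it was last saved. Track the edits
        // made since that save so those diagnostics can be translated into the
        // buffer's current coordinates, or skipped if they intersect an edit.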
729 let mut edits_since_save = content
730 .edits_since::<PointUtf16>(&self.saved_version)
731 .peekable();
732 let mut last_edit_old_end = PointUtf16::zero();
733 let mut last_edit_new_end = PointUtf16::zero();
734 let mut group_ids_by_diagnostic_range = HashMap::new();
735 let mut diagnostics_by_group_id = HashMap::new();
736 let mut next_group_id = 0;
737 'outer: for diagnostic in &diagnostics {
738 let mut start = diagnostic.range.start.to_point_utf16();
739 let mut end = diagnostic.range.end.to_point_utf16();
740 let source = diagnostic.source.as_ref();
741 let code = diagnostic.code.as_ref();
742 let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
743 .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
744 .copied()
745 .unwrap_or_else(|| {
746 let group_id = post_inc(&mut next_group_id);
747 for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
748 group_ids_by_diagnostic_range.insert((source, code, range), group_id);
749 }
750 group_id
751 });
752
753 if diagnostic
754 .source
755 .as_ref()
756 .map_or(false, |source| disk_based_sources.contains(source))
757 {
758 while let Some(edit) = edits_since_save.peek() {
759 if edit.old.end <= start {
760 last_edit_old_end = edit.old.end;
761 last_edit_new_end = edit.new.end;
762 edits_since_save.next();
763 } else if edit.old.start <= end && edit.old.end >= start {
764 continue 'outer;
765 } else {
766 break;
767 }
768 }
769
770 start = last_edit_new_end + (start - last_edit_old_end);
771 end = last_edit_new_end + (end - last_edit_old_end);
772 }
773
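            // If the diagnostic's range is empty, widen it to cover at least one
            // character so it remains visible.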
774 let mut range = content.clip_point_utf16(start, Bias::Left)
775 ..content.clip_point_utf16(end, Bias::Right);
776 if range.start == range.end {
777 range.end.column += 1;
778 range.end = content.clip_point_utf16(range.end, Bias::Right);
779 if range.start == range.end && range.end.column > 0 {
780 range.start.column -= 1;
781 range.start = content.clip_point_utf16(range.start, Bias::Left);
782 }
783 }
784
785 diagnostics_by_group_id
786 .entry(group_id)
787 .or_insert(Vec::new())
788 .push(DiagnosticEntry {
789 range,
790 diagnostic: Diagnostic {
791 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
792 message: diagnostic.message.clone(),
793 group_id,
794 is_primary: false,
795 },
796 });
797 }
798
799 drop(edits_since_save);
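        // Within each group, mark the most severe diagnostic as the primary one
        // before building the new diagnostic set.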
800 let new_diagnostics = DiagnosticSet::new(
801 diagnostics_by_group_id
802 .into_values()
803 .flat_map(|mut diagnostics| {
804 let primary = diagnostics
805 .iter_mut()
806 .min_by_key(|entry| entry.diagnostic.severity)
807 .unwrap();
808 primary.diagnostic.is_primary = true;
809 diagnostics
810 }),
811 content,
812 );
813 self.diagnostics = new_diagnostics;
814
815 if let Some(version) = version {
816 let language_server = self.language_server.as_mut().unwrap();
817 let versions_to_delete = language_server
818 .pending_snapshots
819 .range(..version)
820 .map(|(v, _)| *v)
821 .collect::<Vec<_>>();
822 for version in versions_to_delete {
823 language_server.pending_snapshots.remove(&version);
824 }
825 }
826
827 self.diagnostics_update_count += 1;
828 cx.notify();
829 cx.emit(Event::DiagnosticsUpdated);
830 Ok(Operation::UpdateDiagnostics {
831 diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
832 lamport_timestamp: self.lamport_timestamp(),
833 })
834 }
835
836 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
837 if let Some(indent_columns) = self.compute_autoindents() {
838 let indent_columns = cx.background().spawn(indent_columns);
839 match cx
840 .background()
841 .block_with_timeout(Duration::from_micros(500), indent_columns)
842 {
843 Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
844 Err(indent_columns) => {
845 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
846 let indent_columns = indent_columns.await;
847 this.update(&mut cx, |this, cx| {
848 this.apply_autoindents(indent_columns, cx);
849 });
850 }));
851 }
852 }
853 }
854 }
855
856 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
857 let max_rows_between_yields = 100;
858 let snapshot = self.snapshot();
859 if snapshot.language.is_none()
860 || snapshot.tree.is_none()
861 || self.autoindent_requests.is_empty()
862 {
863 return None;
864 }
865
866 let autoindent_requests = self.autoindent_requests.clone();
867 Some(async move {
868 let mut indent_columns = BTreeMap::new();
869 for request in autoindent_requests {
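                // Map each edited row in the pre-edit snapshot to the row it occupies
                // in the current snapshot, so indentation suggestions can be compared
                // between the two versions of the buffer.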
870 let old_to_new_rows = request
871 .edited
872 .iter()
873 .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
874 .zip(
875 request
876 .edited
877 .iter()
878 .map(|anchor| anchor.summary::<Point>(&snapshot).row),
879 )
880 .collect::<BTreeMap<u32, u32>>();
881
882 let mut old_suggestions = HashMap::<u32, u32>::default();
883 let old_edited_ranges =
884 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
885 for old_edited_range in old_edited_ranges {
886 let suggestions = request
887 .before_edit
888 .suggest_autoindents(old_edited_range.clone())
889 .into_iter()
890 .flatten();
891 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
892 let indentation_basis = old_to_new_rows
893 .get(&suggestion.basis_row)
894 .and_then(|from_row| old_suggestions.get(from_row).copied())
895 .unwrap_or_else(|| {
896 request
897 .before_edit
898 .indent_column_for_line(suggestion.basis_row)
899 });
900 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
901 old_suggestions.insert(
902 *old_to_new_rows.get(&old_row).unwrap(),
903 indentation_basis + delta,
904 );
905 }
906 yield_now().await;
907 }
908
                // At this point, `old_suggestions` contains the suggested indentation for
                // every edited line, computed against the buffer as it was before the
                // edit, but keyed by each line's row after the edits were applied.
911 let new_edited_row_ranges =
912 contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
913 for new_edited_row_range in new_edited_row_ranges {
914 let suggestions = snapshot
915 .suggest_autoindents(new_edited_row_range.clone())
916 .into_iter()
917 .flatten();
918 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
919 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
920 let new_indentation = indent_columns
921 .get(&suggestion.basis_row)
922 .copied()
923 .unwrap_or_else(|| {
924 snapshot.indent_column_for_line(suggestion.basis_row)
925 })
926 + delta;
927 if old_suggestions
928 .get(&new_row)
929 .map_or(true, |old_indentation| new_indentation != *old_indentation)
930 {
931 indent_columns.insert(new_row, new_indentation);
932 }
933 }
934 yield_now().await;
935 }
936
937 if let Some(inserted) = request.inserted.as_ref() {
938 let inserted_row_ranges = contiguous_ranges(
939 inserted
940 .iter()
941 .map(|range| range.to_point(&snapshot))
942 .flat_map(|range| range.start.row..range.end.row + 1),
943 max_rows_between_yields,
944 );
945 for inserted_row_range in inserted_row_ranges {
946 let suggestions = snapshot
947 .suggest_autoindents(inserted_row_range.clone())
948 .into_iter()
949 .flatten();
950 for (row, suggestion) in inserted_row_range.zip(suggestions) {
951 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
952 let new_indentation = indent_columns
953 .get(&suggestion.basis_row)
954 .copied()
955 .unwrap_or_else(|| {
956 snapshot.indent_column_for_line(suggestion.basis_row)
957 })
958 + delta;
959 indent_columns.insert(row, new_indentation);
960 }
961 yield_now().await;
962 }
963 }
964 }
965 indent_columns
966 })
967 }
968
969 fn apply_autoindents(
970 &mut self,
971 indent_columns: BTreeMap<u32, u32>,
972 cx: &mut ModelContext<Self>,
973 ) {
974 let selection_set_ids = self
975 .autoindent_requests
976 .drain(..)
977 .flat_map(|req| req.selection_set_ids.clone())
978 .collect::<HashSet<_>>();
979
980 self.start_transaction(selection_set_ids.iter().copied());
981 for (row, indent_column) in &indent_columns {
982 self.set_indent_column_for_line(*row, *indent_column, cx);
983 }
984
985 for selection_set_id in &selection_set_ids {
986 if let Ok(set) = self.selection_set(*selection_set_id) {
987 let new_selections = set
988 .selections::<Point>(&*self)
989 .map(|selection| {
990 if selection.start.column == 0 {
991 let delta = Point::new(
992 0,
993 indent_columns
994 .get(&selection.start.row)
995 .copied()
996 .unwrap_or(0),
997 );
998 if delta.column > 0 {
999 return Selection {
1000 id: selection.id,
1001 goal: selection.goal,
1002 reversed: selection.reversed,
1003 start: selection.start + delta,
1004 end: selection.end + delta,
1005 };
1006 }
1007 }
1008 selection
1009 })
1010 .collect::<Vec<_>>();
1011 self.update_selection_set(*selection_set_id, &new_selections, cx)
1012 .unwrap();
1013 }
1014 }
1015
1016 self.end_transaction(selection_set_ids.iter().copied(), cx);
1017 }
1018
1019 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
1020 let current_column = self.indent_column_for_line(row);
1021 if column > current_column {
1022 let offset = Point::new(row, 0).to_offset(&*self);
1023 self.edit(
1024 [offset..offset],
1025 " ".repeat((column - current_column) as usize),
1026 cx,
1027 );
1028 } else if column < current_column {
1029 self.edit(
1030 [Point::new(row, 0)..Point::new(row, current_column - column)],
1031 "",
1032 cx,
1033 );
1034 }
1035 }
1036
1037 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
1038 // TODO: it would be nice to not allocate here.
1039 let old_text = self.text();
1040 let base_version = self.version();
1041 cx.background().spawn(async move {
1042 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
1043 .iter_all_changes()
1044 .map(|c| (c.tag(), c.value().len()))
1045 .collect::<Vec<_>>();
1046 Diff {
1047 base_version,
1048 new_text,
1049 changes,
1050 }
1051 })
1052 }
1053
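    /// Apply a diff produced by `diff`. The diff is only applied if the buffer is
    /// still at `base_version`; returns whether it was applied.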
1054 pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
1055 if self.version == diff.base_version {
1056 self.start_transaction(None);
1057 let mut offset = 0;
1058 for (tag, len) in diff.changes {
1059 let range = offset..(offset + len);
1060 match tag {
1061 ChangeTag::Equal => offset += len,
1062 ChangeTag::Delete => self.edit(Some(range), "", cx),
1063 ChangeTag::Insert => {
1064 self.edit(Some(offset..offset), &diff.new_text[range], cx);
1065 offset += len;
1066 }
1067 }
1068 }
1069 self.end_transaction(None, cx);
1070 true
1071 } else {
1072 false
1073 }
1074 }
1075
1076 pub fn is_dirty(&self) -> bool {
1077 !self.saved_version.ge(&self.version)
1078 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1079 }
1080
1081 pub fn has_conflict(&self) -> bool {
1082 !self.saved_version.ge(&self.version)
1083 && self
1084 .file
1085 .as_ref()
1086 .map_or(false, |file| file.mtime() > self.saved_mtime)
1087 }
1088
1089 pub fn subscribe(&mut self) -> Subscription {
1090 self.text.subscribe()
1091 }
1092
1093 pub fn start_transaction(
1094 &mut self,
1095 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1096 ) -> Option<TransactionId> {
1097 self.start_transaction_at(selection_set_ids, Instant::now())
1098 }
1099
1100 pub(crate) fn start_transaction_at(
1101 &mut self,
1102 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1103 now: Instant,
1104 ) -> Option<TransactionId> {
1105 self.text.start_transaction_at(selection_set_ids, now)
1106 }
1107
1108 pub fn end_transaction(
1109 &mut self,
1110 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1111 cx: &mut ModelContext<Self>,
1112 ) -> Option<TransactionId> {
1113 self.end_transaction_at(selection_set_ids, Instant::now(), cx)
1114 }
1115
1116 pub(crate) fn end_transaction_at(
1117 &mut self,
1118 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1119 now: Instant,
1120 cx: &mut ModelContext<Self>,
1121 ) -> Option<TransactionId> {
1122 if let Some((transaction_id, start_version)) =
1123 self.text.end_transaction_at(selection_set_ids, now)
1124 {
1125 let was_dirty = start_version != self.saved_version;
1126 self.did_edit(&start_version, was_dirty, cx);
1127 Some(transaction_id)
1128 } else {
1129 None
1130 }
1131 }
1132
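    /// Take a new snapshot of the buffer, tag it with a monotonically increasing
    /// version number, and hand it to the task that keeps the language server in
    /// sync with this buffer.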
1133 fn update_language_server(&mut self) {
1134 let language_server = if let Some(language_server) = self.language_server.as_mut() {
1135 language_server
1136 } else {
1137 return;
1138 };
1139 let abs_path = self
1140 .file
1141 .as_ref()
1142 .map_or(Path::new("/").to_path_buf(), |file| {
1143 file.abs_path().unwrap()
1144 });
1145
1146 let version = post_inc(&mut language_server.next_version);
1147 let snapshot = LanguageServerSnapshot {
1148 buffer_snapshot: self.text.snapshot(),
1149 version,
1150 path: Arc::from(abs_path),
1151 };
1152 language_server
1153 .pending_snapshots
1154 .insert(version, snapshot.clone());
1155 let _ = language_server
1156 .latest_snapshot
1157 .blocking_send(Some(snapshot));
1158 }
1159
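    /// Replace each of the given ranges with `new_text` in a single transaction,
    /// skipping edits that would make no change and coalescing overlapping or
    /// contiguous ranges. For example, `buffer.edit([0..0], "hello", cx)` inserts
    /// "hello" at the start of the buffer.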
1160 pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
1161 where
1162 I: IntoIterator<Item = Range<S>>,
1163 S: ToOffset,
1164 T: Into<String>,
1165 {
1166 self.edit_internal(ranges_iter, new_text, false, cx)
1167 }
1168
1169 pub fn edit_with_autoindent<I, S, T>(
1170 &mut self,
1171 ranges_iter: I,
1172 new_text: T,
1173 cx: &mut ModelContext<Self>,
1174 ) where
1175 I: IntoIterator<Item = Range<S>>,
1176 S: ToOffset,
1177 T: Into<String>,
1178 {
1179 self.edit_internal(ranges_iter, new_text, true, cx)
1180 }
1181
1182 pub fn edit_internal<I, S, T>(
1183 &mut self,
1184 ranges_iter: I,
1185 new_text: T,
1186 autoindent: bool,
1187 cx: &mut ModelContext<Self>,
1188 ) where
1189 I: IntoIterator<Item = Range<S>>,
1190 S: ToOffset,
1191 T: Into<String>,
1192 {
1193 let new_text = new_text.into();
1194
        // Skip edits that would make no change and coalesce overlapping or contiguous ranges.
1196 let mut ranges: Vec<Range<usize>> = Vec::new();
1197 for range in ranges_iter {
1198 let range = range.start.to_offset(self)..range.end.to_offset(self);
1199 if !new_text.is_empty() || !range.is_empty() {
1200 if let Some(prev_range) = ranges.last_mut() {
1201 if prev_range.end >= range.start {
1202 prev_range.end = cmp::max(prev_range.end, range.end);
1203 } else {
1204 ranges.push(range);
1205 }
1206 } else {
1207 ranges.push(range);
1208 }
1209 }
1210 }
1211 if ranges.is_empty() {
1212 return;
1213 }
1214
1215 self.start_transaction(None);
1216 self.pending_autoindent.take();
1217 let autoindent_request = if autoindent && self.language.is_some() {
1218 let before_edit = self.snapshot();
1219 let edited = ranges
1220 .iter()
1221 .filter_map(|range| {
1222 let start = range.start.to_point(self);
1223 if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
1224 None
1225 } else {
1226 Some(self.anchor_before(range.start))
1227 }
1228 })
1229 .collect();
1230 Some((before_edit, edited))
1231 } else {
1232 None
1233 };
1234
1235 let first_newline_ix = new_text.find('\n');
1236 let new_text_len = new_text.len();
1237
1238 let edit = self.text.edit(ranges.iter().cloned(), new_text);
1239
1240 if let Some((before_edit, edited)) = autoindent_request {
1241 let mut inserted = None;
1242 if let Some(first_newline_ix) = first_newline_ix {
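                // Record the ranges of newly inserted text that begin after the first
                // newline in each edit; rows in these ranges are candidates for
                // auto-indentation. `delta` tracks how much earlier edits in this batch
                // have shifted later offsets.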
1243 let mut delta = 0isize;
1244 inserted = Some(
1245 ranges
1246 .iter()
1247 .map(|range| {
1248 let start =
1249 (delta + range.start as isize) as usize + first_newline_ix + 1;
1250 let end = (delta + range.start as isize) as usize + new_text_len;
                            delta += new_text_len as isize
                                - (range.end as isize - range.start as isize);
1253 self.anchor_before(start)..self.anchor_after(end)
1254 })
1255 .collect(),
1256 );
1257 }
1258
1259 let selection_set_ids = self
1260 .text
1261 .peek_undo_stack()
1262 .unwrap()
1263 .starting_selection_set_ids()
1264 .collect();
1265 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1266 selection_set_ids,
1267 before_edit,
1268 edited,
1269 inserted,
1270 }));
1271 }
1272
1273 self.end_transaction(None, cx);
1274 self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
1275 }
1276
1277 fn did_edit(
1278 &mut self,
1279 old_version: &clock::Global,
1280 was_dirty: bool,
1281 cx: &mut ModelContext<Self>,
1282 ) {
1283 if self.edits_since::<usize>(old_version).next().is_none() {
1284 return;
1285 }
1286
1287 self.reparse(cx);
1288 self.update_language_server();
1289
1290 cx.emit(Event::Edited);
1291 if !was_dirty {
1292 cx.emit(Event::Dirtied);
1293 }
1294 cx.notify();
1295 }
1296
1297 fn grammar(&self) -> Option<&Arc<Grammar>> {
1298 self.language.as_ref().and_then(|l| l.grammar.as_ref())
1299 }
1300
1301 pub fn add_selection_set<T: ToOffset>(
1302 &mut self,
1303 selections: &[Selection<T>],
1304 cx: &mut ModelContext<Self>,
1305 ) -> SelectionSetId {
1306 let operation = self.text.add_selection_set(selections);
1307 if let text::Operation::UpdateSelections { set_id, .. } = &operation {
1308 let set_id = *set_id;
1309 cx.notify();
1310 self.send_operation(Operation::Buffer(operation), cx);
1311 set_id
1312 } else {
1313 unreachable!()
1314 }
1315 }
1316
1317 pub fn update_selection_set<T: ToOffset>(
1318 &mut self,
1319 set_id: SelectionSetId,
1320 selections: &[Selection<T>],
1321 cx: &mut ModelContext<Self>,
1322 ) -> Result<()> {
1323 let operation = self.text.update_selection_set(set_id, selections)?;
1324 cx.notify();
1325 self.send_operation(Operation::Buffer(operation), cx);
1326 Ok(())
1327 }
1328
1329 pub fn set_active_selection_set(
1330 &mut self,
1331 set_id: Option<SelectionSetId>,
1332 cx: &mut ModelContext<Self>,
1333 ) -> Result<()> {
1334 let operation = self.text.set_active_selection_set(set_id)?;
1335 self.send_operation(Operation::Buffer(operation), cx);
1336 Ok(())
1337 }
1338
1339 pub fn remove_selection_set(
1340 &mut self,
1341 set_id: SelectionSetId,
1342 cx: &mut ModelContext<Self>,
1343 ) -> Result<()> {
1344 let operation = self.text.remove_selection_set(set_id)?;
1345 cx.notify();
1346 self.send_operation(Operation::Buffer(operation), cx);
1347 Ok(())
1348 }
1349
1350 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1351 &mut self,
1352 ops: I,
1353 cx: &mut ModelContext<Self>,
1354 ) -> Result<()> {
1355 self.pending_autoindent.take();
1356 let was_dirty = self.is_dirty();
1357 let old_version = self.version.clone();
1358 let mut deferred_ops = Vec::new();
1359 let buffer_ops = ops
1360 .into_iter()
1361 .filter_map(|op| match op {
1362 Operation::Buffer(op) => Some(op),
1363 _ => {
1364 if self.can_apply_op(&op) {
1365 self.apply_op(op, cx);
1366 } else {
1367 deferred_ops.push(op);
1368 }
1369 None
1370 }
1371 })
1372 .collect::<Vec<_>>();
1373 self.text.apply_ops(buffer_ops)?;
1374 self.flush_deferred_ops(cx);
1375 self.did_edit(&old_version, was_dirty, cx);
        // Notify even if the buffer wasn't edited, since the applied operations may
        // have included a selection update.
1378 cx.notify();
1379 Ok(())
1380 }
1381
1382 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1383 let mut deferred_ops = Vec::new();
1384 for op in self.deferred_ops.drain().iter().cloned() {
1385 if self.can_apply_op(&op) {
1386 self.apply_op(op, cx);
1387 } else {
1388 deferred_ops.push(op);
1389 }
1390 }
1391 self.deferred_ops.insert(deferred_ops);
1392 }
1393
1394 fn can_apply_op(&self, operation: &Operation) -> bool {
1395 match operation {
1396 Operation::Buffer(_) => {
1397 unreachable!("buffer operations should never be applied at this layer")
1398 }
1399 Operation::UpdateDiagnostics { diagnostics, .. } => {
1400 diagnostics.iter().all(|diagnostic| {
1401 self.text.can_resolve(&diagnostic.range.start)
1402 && self.text.can_resolve(&diagnostic.range.end)
1403 })
1404 }
1405 }
1406 }
1407
1408 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1409 match operation {
1410 Operation::Buffer(_) => {
1411 unreachable!("buffer operations should never be applied at this layer")
1412 }
1413 Operation::UpdateDiagnostics { diagnostics, .. } => {
1414 self.apply_diagnostic_update(diagnostics, cx);
1415 }
1416 }
1417 }
1418
1419 fn apply_diagnostic_update(
1420 &mut self,
1421 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
1422 cx: &mut ModelContext<Self>,
1423 ) {
1424 self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
1425 self.diagnostics_update_count += 1;
1426 cx.notify();
1427 }
1428
1429 #[cfg(not(test))]
1430 pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1431 if let Some(file) = &self.file {
1432 file.buffer_updated(self.remote_id(), operation, cx.as_mut());
1433 }
1434 }
1435
1436 #[cfg(test)]
1437 pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
1438 self.operations.push(operation);
1439 }
1440
1441 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1442 self.text.remove_peer(replica_id);
1443 cx.notify();
1444 }
1445
1446 pub fn undo(&mut self, cx: &mut ModelContext<Self>) {
1447 let was_dirty = self.is_dirty();
1448 let old_version = self.version.clone();
1449
1450 for operation in self.text.undo() {
1451 self.send_operation(Operation::Buffer(operation), cx);
1452 }
1453
1454 self.did_edit(&old_version, was_dirty, cx);
1455 }
1456
1457 pub fn redo(&mut self, cx: &mut ModelContext<Self>) {
1458 let was_dirty = self.is_dirty();
1459 let old_version = self.version.clone();
1460
1461 for operation in self.text.redo() {
1462 self.send_operation(Operation::Buffer(operation), cx);
1463 }
1464
1465 self.did_edit(&old_version, was_dirty, cx);
1466 }
1467}
1468
1469#[cfg(any(test, feature = "test-support"))]
1470impl Buffer {
1471 pub fn randomly_edit<T>(
1472 &mut self,
1473 rng: &mut T,
1474 old_range_count: usize,
1475 cx: &mut ModelContext<Self>,
1476 ) where
1477 T: rand::Rng,
1478 {
1479 self.start_transaction(None);
1480 self.text.randomly_edit(rng, old_range_count);
1481 self.end_transaction(None, cx);
1482 }
1483
1484 pub fn randomly_mutate<T>(&mut self, rng: &mut T, cx: &mut ModelContext<Self>)
1485 where
1486 T: rand::Rng,
1487 {
1488 self.start_transaction(None);
1489 self.text.randomly_mutate(rng);
1490 self.end_transaction(None, cx);
1491 }
1492}
1493
1494impl Entity for Buffer {
1495 type Event = Event;
1496
1497 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
1498 if let Some(file) = self.file.as_ref() {
1499 file.buffer_removed(self.remote_id(), cx);
1500 }
1501 }
1502}
1503
1504impl Deref for Buffer {
1505 type Target = TextBuffer;
1506
1507 fn deref(&self) -> &Self::Target {
1508 &self.text
1509 }
1510}
1511
1512impl BufferSnapshot {
1513 fn suggest_autoindents<'a>(
1514 &'a self,
1515 row_range: Range<u32>,
1516 ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
1517 let mut query_cursor = QueryCursorHandle::new();
1518 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1519 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1520
1521 // Get the "indentation ranges" that intersect this row range.
1522 let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
1523 let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
1524 query_cursor.set_point_range(
1525 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
1526 ..Point::new(row_range.end, 0).to_ts_point(),
1527 );
1528 let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
1529 for mat in query_cursor.matches(
1530 &grammar.indents_query,
1531 tree.root_node(),
1532 TextProvider(self.as_rope()),
1533 ) {
1534 let mut node_kind = "";
1535 let mut start: Option<Point> = None;
1536 let mut end: Option<Point> = None;
1537 for capture in mat.captures {
1538 if Some(capture.index) == indent_capture_ix {
1539 node_kind = capture.node.kind();
1540 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1541 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1542 } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position()));
1544 }
1545 }
1546
1547 if let Some((start, end)) = start.zip(end) {
1548 if start.row == end.row {
1549 continue;
1550 }
1551
1552 let range = start..end;
1553 match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
1554 Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
1555 Ok(ix) => {
1556 let prev_range = &mut indentation_ranges[ix];
1557 prev_range.0.end = prev_range.0.end.max(range.end);
1558 }
1559 }
1560 }
1561 }
1562
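            // For each row, suggest indenting relative to the previous row when an
            // indentation range starts on that row and still encloses this row's
            // start, and suggest outdenting back to the row that opened a range which
            // ends at or before this row's start.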
1563 let mut prev_row = prev_non_blank_row.unwrap_or(0);
1564 Some(row_range.map(move |row| {
1565 let row_start = Point::new(row, self.indent_column_for_line(row));
1566
1567 let mut indent_from_prev_row = false;
1568 let mut outdent_to_row = u32::MAX;
1569 for (range, _node_kind) in &indentation_ranges {
1570 if range.start.row >= row {
1571 break;
1572 }
1573
1574 if range.start.row == prev_row && range.end > row_start {
1575 indent_from_prev_row = true;
1576 }
1577 if range.end.row >= prev_row && range.end <= row_start {
1578 outdent_to_row = outdent_to_row.min(range.start.row);
1579 }
1580 }
1581
1582 let suggestion = if outdent_to_row == prev_row {
1583 IndentSuggestion {
1584 basis_row: prev_row,
1585 indent: false,
1586 }
1587 } else if indent_from_prev_row {
1588 IndentSuggestion {
1589 basis_row: prev_row,
1590 indent: true,
1591 }
1592 } else if outdent_to_row < prev_row {
1593 IndentSuggestion {
1594 basis_row: outdent_to_row,
1595 indent: false,
1596 }
1597 } else {
1598 IndentSuggestion {
1599 basis_row: prev_row,
1600 indent: false,
1601 }
1602 };
1603
1604 prev_row = row;
1605 suggestion
1606 }))
1607 } else {
1608 None
1609 }
1610 }
1611
1612 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
1613 while row > 0 {
1614 row -= 1;
1615 if !self.is_line_blank(row) {
1616 return Some(row);
1617 }
1618 }
1619 None
1620 }
1621
1622 pub fn chunks<'a, T: ToOffset>(
1623 &'a self,
1624 range: Range<T>,
1625 theme: Option<&'a SyntaxTheme>,
1626 ) -> BufferChunks<'a> {
1627 let range = range.start.to_offset(self)..range.end.to_offset(self);
1628
1629 let mut highlights = None;
1630 let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
1631 if let Some(theme) = theme {
1632 for entry in self
1633 .diagnostics
1634 .range::<_, usize>(range.clone(), self, true)
1635 {
1636 diagnostic_endpoints.push(DiagnosticEndpoint {
1637 offset: entry.range.start,
1638 is_start: true,
1639 severity: entry.diagnostic.severity,
1640 });
1641 diagnostic_endpoints.push(DiagnosticEndpoint {
1642 offset: entry.range.end,
1643 is_start: false,
1644 severity: entry.diagnostic.severity,
1645 });
1646 }
1647 diagnostic_endpoints
1648 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
1649
1650 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1651 let mut query_cursor = QueryCursorHandle::new();
1652
1653 // TODO - add a Tree-sitter API to remove the need for this.
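                // Extending the cursor's lifetime to 'static is believed to be sound
                // here because the captures iterator that borrows it is stored alongside
                // the cursor in `BufferChunkHighlights`, and `_query_cursor` keeps the
                // cursor alive for as long as the captures are used.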
1654 let cursor = unsafe {
1655 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
1656 };
1657 let captures = cursor.set_byte_range(range.clone()).captures(
1658 &grammar.highlights_query,
1659 tree.root_node(),
1660 TextProvider(self.text.as_rope()),
1661 );
1662 highlights = Some(BufferChunkHighlights {
1663 captures,
1664 next_capture: None,
1665 stack: Default::default(),
1666 highlight_map: grammar.highlight_map(),
1667 _query_cursor: query_cursor,
1668 theme,
1669 })
1670 }
1671 }
1672
1673 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
1674 let chunks = self.text.as_rope().chunks_in_range(range.clone());
1675
1676 BufferChunks {
1677 range,
1678 chunks,
1679 diagnostic_endpoints,
1680 error_depth: 0,
1681 warning_depth: 0,
1682 information_depth: 0,
1683 hint_depth: 0,
1684 highlights,
1685 }
1686 }
1687
1688 pub fn language(&self) -> Option<&Arc<Language>> {
1689 self.language.as_ref()
1690 }
1691
1692 fn grammar(&self) -> Option<&Arc<Grammar>> {
1693 self.language
1694 .as_ref()
1695 .and_then(|language| language.grammar.as_ref())
1696 }
1697
1698 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
1699 if let Some(tree) = self.tree.as_ref() {
1700 let root = tree.root_node();
1701 let range = range.start.to_offset(self)..range.end.to_offset(self);
1702 let mut node = root.descendant_for_byte_range(range.start, range.end);
1703 while node.map_or(false, |n| n.byte_range() == range) {
1704 node = node.unwrap().parent();
1705 }
1706 node.map(|n| n.byte_range())
1707 } else {
1708 None
1709 }
1710 }
1711
1712 pub fn enclosing_bracket_ranges<T: ToOffset>(
1713 &self,
1714 range: Range<T>,
1715 ) -> Option<(Range<usize>, Range<usize>)> {
1716 let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
1717 let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
1718 let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
1719
1720 // Find bracket pairs that *inclusively* contain the given range.
1721 let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
1722 let mut cursor = QueryCursorHandle::new();
1723 let matches = cursor.set_byte_range(range).matches(
1724 &grammar.brackets_query,
1725 tree.root_node(),
1726 TextProvider(self.as_rope()),
1727 );
1728
1729 // Get the ranges of the innermost pair of brackets.
1730 matches
1731 .filter_map(|mat| {
1732 let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
1733 let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
1734 Some((open.byte_range(), close.byte_range()))
1735 })
1736 .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
1737 }
1738
1739 pub fn diagnostics_in_range<'a, T, O>(
1740 &'a self,
1741 search_range: Range<T>,
1742 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
1743 where
1744 T: 'a + ToOffset,
1745 O: 'a + FromAnchor,
1746 {
1747 self.diagnostics.range(search_range, self, true)
1748 }
1749
1750 pub fn diagnostic_group<'a, O>(
1751 &'a self,
1752 group_id: usize,
1753 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
1754 where
1755 O: 'a + FromAnchor,
1756 {
1757 self.diagnostics.group(group_id, self)
1758 }
1759
1760 pub fn diagnostics_update_count(&self) -> usize {
1761 self.diagnostics_update_count
1762 }
1763
1764 pub fn parse_count(&self) -> usize {
1765 self.parse_count
1766 }
1767}
1768
1769impl Clone for BufferSnapshot {
1770 fn clone(&self) -> Self {
1771 Self {
1772 text: self.text.clone(),
1773 tree: self.tree.clone(),
1774 diagnostics: self.diagnostics.clone(),
1775 diagnostics_update_count: self.diagnostics_update_count,
1776 is_parsing: self.is_parsing,
1777 language: self.language.clone(),
1778 parse_count: self.parse_count,
1779 }
1780 }
1781}
1782
1783impl Deref for BufferSnapshot {
1784 type Target = text::BufferSnapshot;
1785
1786 fn deref(&self) -> &Self::Target {
1787 &self.text
1788 }
1789}
1790
1791impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
1792 type I = ByteChunks<'a>;
1793
1794 fn text(&mut self, node: tree_sitter::Node) -> Self::I {
1795 ByteChunks(self.0.chunks_in_range(node.byte_range()))
1796 }
1797}
1798
1799struct ByteChunks<'a>(rope::Chunks<'a>);
1800
1801impl<'a> Iterator for ByteChunks<'a> {
1802 type Item = &'a [u8];
1803
1804 fn next(&mut self) -> Option<Self::Item> {
1805 self.0.next().map(str::as_bytes)
1806 }
1807}
1808
1809unsafe impl<'a> Send for BufferChunks<'a> {}
1810
1811impl<'a> BufferChunks<'a> {
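    /// Reposition this iterator at `offset`, dropping any highlight captures on the
    /// stack that end at or before the new position.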
1812 pub fn seek(&mut self, offset: usize) {
1813 self.range.start = offset;
1814 self.chunks.seek(self.range.start);
1815 if let Some(highlights) = self.highlights.as_mut() {
1816 highlights
1817 .stack
1818 .retain(|(end_offset, _)| *end_offset > offset);
1819 if let Some((mat, capture_ix)) = &highlights.next_capture {
1820 let capture = mat.captures[*capture_ix as usize];
1821 if offset >= capture.node.start_byte() {
1822 let next_capture_end = capture.node.end_byte();
1823 if offset < next_capture_end {
1824 highlights.stack.push((
1825 next_capture_end,
1826 highlights.highlight_map.get(capture.index),
1827 ));
1828 }
1829 highlights.next_capture.take();
1830 }
1831 }
1832 highlights.captures.set_byte_range(self.range.clone());
1833 }
1834 }
1835
1836 pub fn offset(&self) -> usize {
1837 self.range.start
1838 }
1839
1840 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
1841 let depth = match endpoint.severity {
1842 DiagnosticSeverity::ERROR => &mut self.error_depth,
1843 DiagnosticSeverity::WARNING => &mut self.warning_depth,
1844 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
1845 DiagnosticSeverity::HINT => &mut self.hint_depth,
1846 _ => return,
1847 };
1848 if endpoint.is_start {
1849 *depth += 1;
1850 } else {
1851 *depth -= 1;
1852 }
1853 }
1854
1855 fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
1856 if self.error_depth > 0 {
1857 Some(DiagnosticSeverity::ERROR)
1858 } else if self.warning_depth > 0 {
1859 Some(DiagnosticSeverity::WARNING)
1860 } else if self.information_depth > 0 {
1861 Some(DiagnosticSeverity::INFORMATION)
1862 } else if self.hint_depth > 0 {
1863 Some(DiagnosticSeverity::HINT)
1864 } else {
1865 None
1866 }
1867 }
1868}
1869
1870impl<'a> Iterator for BufferChunks<'a> {
1871 type Item = Chunk<'a>;
1872
1873 fn next(&mut self) -> Option<Self::Item> {
1874 let mut next_capture_start = usize::MAX;
1875 let mut next_diagnostic_endpoint = usize::MAX;
1876
1877 if let Some(highlights) = self.highlights.as_mut() {
1878 while let Some((parent_capture_end, _)) = highlights.stack.last() {
1879 if *parent_capture_end <= self.range.start {
1880 highlights.stack.pop();
1881 } else {
1882 break;
1883 }
1884 }
1885
1886 if highlights.next_capture.is_none() {
1887 highlights.next_capture = highlights.captures.next();
1888 }
1889
1890 while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
1891 let capture = mat.captures[*capture_ix as usize];
1892 if self.range.start < capture.node.start_byte() {
1893 next_capture_start = capture.node.start_byte();
1894 break;
1895 } else {
1896 let highlight_id = highlights.highlight_map.get(capture.index);
1897 highlights
1898 .stack
1899 .push((capture.node.end_byte(), highlight_id));
1900 highlights.next_capture = highlights.captures.next();
1901 }
1902 }
1903 }
1904
1905 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
1906 if endpoint.offset <= self.range.start {
1907 self.update_diagnostic_depths(endpoint);
1908 self.diagnostic_endpoints.next();
1909 } else {
1910 next_diagnostic_endpoint = endpoint.offset;
1911 break;
1912 }
1913 }
1914
1915 if let Some(chunk) = self.chunks.peek() {
1916 let chunk_start = self.range.start;
1917 let mut chunk_end = (self.chunks.offset() + chunk.len())
1918 .min(next_capture_start)
1919 .min(next_diagnostic_endpoint);
1920 let mut highlight_style = None;
1921 if let Some(highlights) = self.highlights.as_ref() {
1922 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
1923 chunk_end = chunk_end.min(*parent_capture_end);
1924 highlight_style = parent_highlight_id.style(highlights.theme);
1925 }
1926 }
1927
1928 let slice =
1929 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
1930 self.range.start = chunk_end;
1931 if self.range.start == self.chunks.offset() + chunk.len() {
1932 self.chunks.next().unwrap();
1933 }
1934
1935 Some(Chunk {
1936 text: slice,
1937 highlight_style,
1938 diagnostic: self.current_diagnostic_severity(),
1939 })
1940 } else {
1941 None
1942 }
1943 }
1944}
1945
1946impl QueryCursorHandle {
1947 fn new() -> Self {
1948 QueryCursorHandle(Some(
1949 QUERY_CURSORS
1950 .lock()
1951 .pop()
1952 .unwrap_or_else(|| QueryCursor::new()),
1953 ))
1954 }
1955}
1956
1957impl Deref for QueryCursorHandle {
1958 type Target = QueryCursor;
1959
1960 fn deref(&self) -> &Self::Target {
1961 self.0.as_ref().unwrap()
1962 }
1963}
1964
1965impl DerefMut for QueryCursorHandle {
1966 fn deref_mut(&mut self) -> &mut Self::Target {
1967 self.0.as_mut().unwrap()
1968 }
1969}
1970
1971impl Drop for QueryCursorHandle {
1972 fn drop(&mut self) {
1973 let mut cursor = self.0.take().unwrap();
1974 cursor.set_byte_range(0..usize::MAX);
1975 cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
1976 QUERY_CURSORS.lock().push(cursor)
1977 }
1978}
1979
1980trait ToTreeSitterPoint {
1981 fn to_ts_point(self) -> tree_sitter::Point;
1982 fn from_ts_point(point: tree_sitter::Point) -> Self;
1983}
1984
1985impl ToTreeSitterPoint for Point {
1986 fn to_ts_point(self) -> tree_sitter::Point {
1987 tree_sitter::Point::new(self.row as usize, self.column as usize)
1988 }
1989
1990 fn from_ts_point(point: tree_sitter::Point) -> Self {
1991 Point::new(point.row as u32, point.column as u32)
1992 }
1993}
1994
1995trait ToPointUtf16 {
1996 fn to_point_utf16(self) -> PointUtf16;
1997}
1998
1999impl ToPointUtf16 for lsp::Position {
2000 fn to_point_utf16(self) -> PointUtf16 {
2001 PointUtf16::new(self.line, self.character)
2002 }
2003}
2004
2005impl operation_queue::Operation for Operation {
2006 fn lamport_timestamp(&self) -> clock::Lamport {
2007 match self {
2008 Operation::Buffer(_) => {
2009 unreachable!("buffer operations should never be deferred at this layer")
2010 }
2011 Operation::UpdateDiagnostics {
2012 lamport_timestamp, ..
2013 } => *lamport_timestamp,
2014 }
2015 }
2016}
2017
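/// Yields the ranges within `abs_path` mentioned in the diagnostic's related
/// information, followed by the diagnostic's own range.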
2018fn diagnostic_ranges<'a>(
2019 diagnostic: &'a lsp::Diagnostic,
2020 abs_path: Option<&'a Path>,
2021) -> impl 'a + Iterator<Item = Range<PointUtf16>> {
2022 diagnostic
2023 .related_information
2024 .iter()
2025 .flatten()
2026 .filter_map(move |info| {
2027 if info.location.uri.to_file_path().ok()? == abs_path? {
2028 let info_start = PointUtf16::new(
2029 info.location.range.start.line,
2030 info.location.range.start.character,
2031 );
2032 let info_end = PointUtf16::new(
2033 info.location.range.end.line,
2034 info.location.range.end.character,
2035 );
2036 Some(info_start..info_end)
2037 } else {
2038 None
2039 }
2040 })
2041 .chain(Some(
2042 diagnostic.range.start.to_point_utf16()..diagnostic.range.end.to_point_utf16(),
2043 ))
2044}
2045
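/// Groups an ascending sequence of row numbers into contiguous ranges, splitting a
/// range whenever it would exceed `max_len` rows. For example, with a `max_len` of 2,
/// the rows `[1, 2, 3, 5, 6, 9]` yield the ranges `1..3`, `3..4`, `5..7`, and `9..10`.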
2046pub fn contiguous_ranges(
2047 values: impl Iterator<Item = u32>,
2048 max_len: usize,
2049) -> impl Iterator<Item = Range<u32>> {
2050 let mut values = values.into_iter();
2051 let mut current_range: Option<Range<u32>> = None;
2052 std::iter::from_fn(move || loop {
2053 if let Some(value) = values.next() {
2054 if let Some(range) = &mut current_range {
2055 if value == range.end && range.len() < max_len {
2056 range.end += 1;
2057 continue;
2058 }
2059 }
2060
2061 let prev_range = current_range.clone();
2062 current_range = Some(value..(value + 1));
2063 if prev_range.is_some() {
2064 return prev_range;
2065 }
2066 } else {
2067 return current_range.take();
2068 }
2069 })
2070}