1use crate::diagnostic_set::DiagnosticEntry;
2pub use crate::{
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
6 PLAIN_TEXT,
7};
8use anyhow::{anyhow, Result};
9use clock::ReplicaId;
10use futures::FutureExt as _;
11use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
12use lazy_static::lazy_static;
13use lsp::LanguageServer;
14use parking_lot::Mutex;
15use postage::{prelude::Stream, sink::Sink, watch};
16use similar::{ChangeTag, TextDiff};
17use smol::future::yield_now;
18use std::{
19 any::Any,
20 cell::RefCell,
21 cmp,
22 collections::{BTreeMap, HashMap, HashSet},
23 ffi::OsString,
24 future::Future,
25 iter::{Iterator, Peekable},
26 ops::{Deref, DerefMut, Range},
27 path::{Path, PathBuf},
28 str,
29 sync::Arc,
30 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
31 vec,
32};
33use text::operation_queue::OperationQueue;
34pub use text::{Buffer as TextBuffer, Operation as _, *};
35use theme::SyntaxTheme;
36use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
37use util::{post_inc, TryFutureExt as _};
38
39#[cfg(any(test, feature = "test-support"))]
40pub use tree_sitter_rust;
41
42pub use lsp::DiagnosticSeverity;
43
44thread_local! {
45 static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
46}
47
48lazy_static! {
49 static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
50}
51
52// TODO - Make this configurable
53const INDENT_SIZE: u32 = 4;
54
55pub struct Buffer {
56 text: TextBuffer,
57 file: Option<Box<dyn File>>,
58 saved_version: clock::Global,
59 saved_mtime: SystemTime,
60 language: Option<Arc<Language>>,
61 autoindent_requests: Vec<Arc<AutoindentRequest>>,
62 pending_autoindent: Option<Task<()>>,
63 sync_parse_timeout: Duration,
64 syntax_tree: Mutex<Option<SyntaxTree>>,
65 parsing_in_background: bool,
66 parse_count: usize,
67 diagnostics: DiagnosticSet,
68 diagnostics_update_count: usize,
69 language_server: Option<LanguageServerState>,
70 deferred_ops: OperationQueue<Operation>,
71 #[cfg(test)]
72 pub(crate) operations: Vec<Operation>,
73}
74
75pub struct BufferSnapshot {
76 text: text::BufferSnapshot,
77 tree: Option<Tree>,
78 diagnostics: DiagnosticSet,
79 diagnostics_update_count: usize,
80 is_parsing: bool,
81 language: Option<Arc<Language>>,
82 parse_count: usize,
83}
84
85#[derive(Clone, Debug, PartialEq, Eq)]
86pub struct Diagnostic {
87 pub severity: DiagnosticSeverity,
88 pub message: String,
89 pub group_id: usize,
90 pub is_primary: bool,
91}
92
93struct LanguageServerState {
94 server: Arc<LanguageServer>,
95 latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
96 pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
97 next_version: usize,
98 _maintain_server: Task<Option<()>>,
99}
100
101#[derive(Clone)]
102struct LanguageServerSnapshot {
103 buffer_snapshot: text::BufferSnapshot,
104 version: usize,
105 path: Arc<Path>,
106}
107
108#[derive(Clone, Debug)]
109pub enum Operation {
110 Buffer(text::Operation),
111 UpdateDiagnostics {
112 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
113 lamport_timestamp: clock::Lamport,
114 },
115}
116
117#[derive(Clone, Debug, Eq, PartialEq)]
118pub enum Event {
119 Edited,
120 Dirtied,
121 Saved,
122 FileHandleChanged,
123 Reloaded,
124 Reparsed,
125 DiagnosticsUpdated,
126 Closed,
127}
128
129pub trait File {
130 fn worktree_id(&self) -> usize;
131
132 fn entry_id(&self) -> Option<usize>;
133
134 fn mtime(&self) -> SystemTime;
135
136 /// Returns the path of this file relative to the worktree's root directory.
137 fn path(&self) -> &Arc<Path>;
138
139 /// Returns the absolute path of this file.
140 fn abs_path(&self) -> Option<PathBuf>;
141
    /// Returns the path of this file relative to the worktree's parent directory
    /// (i.e., it includes the name of the worktree's root folder).
    fn full_path(&self) -> PathBuf;
145
146 /// Returns the last component of this handle's absolute path. If this handle refers to the root
147 /// of its worktree, then this method will return the name of the worktree itself.
148 fn file_name(&self) -> Option<OsString>;
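    // A hypothetical illustration of the path accessors above, assuming a worktree
    // rooted at "/home/user/my-project" that contains "src/lib.rs":
    //   path()      -> "src/lib.rs"
    //   abs_path()  -> Some("/home/user/my-project/src/lib.rs")
    //   full_path() -> "my-project/src/lib.rs"
    //   file_name() -> Some("lib.rs")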
149
150 fn is_deleted(&self) -> bool;
151
152 fn save(
153 &self,
154 buffer_id: u64,
155 text: Rope,
156 version: clock::Global,
157 cx: &mut MutableAppContext,
158 ) -> Task<Result<(clock::Global, SystemTime)>>;
159
160 fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;
161
162 fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);
163
164 fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);
165
166 fn boxed_clone(&self) -> Box<dyn File>;
167
168 fn as_any(&self) -> &dyn Any;
169}
170
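/// A tree-sitter `QueryCursor` checked out of the global `QUERY_CURSORS` pool.
/// The cursor's byte and point ranges are reset and it is returned to the pool
/// when the handle is dropped.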
171struct QueryCursorHandle(Option<QueryCursor>);
172
173#[derive(Clone)]
174struct SyntaxTree {
175 tree: Tree,
176 version: clock::Global,
177}
178
179#[derive(Clone)]
180struct AutoindentRequest {
181 selection_set_ids: HashSet<SelectionSetId>,
182 before_edit: BufferSnapshot,
183 edited: Vec<Anchor>,
184 inserted: Option<Vec<Range<Anchor>>>,
185}
186
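/// A suggested indentation for a single row: match the indentation of `basis_row`,
/// adding one extra level (`INDENT_SIZE`) when `indent` is true.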
187#[derive(Debug)]
188struct IndentSuggestion {
189 basis_row: u32,
190 indent: bool,
191}
192
193struct TextProvider<'a>(&'a Rope);
194
195struct BufferChunkHighlights<'a> {
196 captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
197 next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
198 stack: Vec<(usize, HighlightId)>,
199 highlight_map: HighlightMap,
200 theme: &'a SyntaxTheme,
201 _query_cursor: QueryCursorHandle,
202}
203
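/// An iterator over a range of a buffer's text, yielding [`Chunk`]s annotated with
/// syntax highlighting (when a grammar and theme are available) and the severity of
/// the most severe diagnostic overlapping each chunk.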
204pub struct BufferChunks<'a> {
205 range: Range<usize>,
206 chunks: rope::Chunks<'a>,
207 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
208 error_depth: usize,
209 warning_depth: usize,
210 information_depth: usize,
211 hint_depth: usize,
212 highlights: Option<BufferChunkHighlights<'a>>,
213}
214
215#[derive(Clone, Copy, Debug, Default)]
216pub struct Chunk<'a> {
217 pub text: &'a str,
218 pub highlight_style: Option<HighlightStyle>,
219 pub diagnostic: Option<DiagnosticSeverity>,
220}
221
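/// A line-based diff between the buffer's text at `base_version` and `new_text`,
/// produced by `Buffer::diff` and applied with `Buffer::apply_diff`.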
222pub(crate) struct Diff {
223 base_version: clock::Global,
224 new_text: Arc<str>,
225 changes: Vec<(ChangeTag, usize)>,
226}
227
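/// One endpoint (start or end) of a diagnostic's range, used by `BufferChunks` to
/// track how many diagnostics of each severity cover the current offset.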
228#[derive(Clone, Copy)]
229struct DiagnosticEndpoint {
230 offset: usize,
231 is_start: bool,
232 severity: DiagnosticSeverity,
233}
234
235impl Buffer {
236 pub fn new<T: Into<Arc<str>>>(
237 replica_id: ReplicaId,
238 base_text: T,
239 cx: &mut ModelContext<Self>,
240 ) -> Self {
241 Self::build(
242 TextBuffer::new(
243 replica_id,
244 cx.model_id() as u64,
245 History::new(base_text.into()),
246 ),
247 None,
248 )
249 }
250
251 pub fn from_file<T: Into<Arc<str>>>(
252 replica_id: ReplicaId,
253 base_text: T,
254 file: Box<dyn File>,
255 cx: &mut ModelContext<Self>,
256 ) -> Self {
257 Self::build(
258 TextBuffer::new(
259 replica_id,
260 cx.model_id() as u64,
261 History::new(base_text.into()),
262 ),
263 Some(file),
264 )
265 }
266
267 pub fn from_proto(
268 replica_id: ReplicaId,
269 message: proto::Buffer,
270 file: Option<Box<dyn File>>,
271 cx: &mut ModelContext<Self>,
272 ) -> Result<Self> {
273 let mut buffer =
274 text::Buffer::new(replica_id, message.id, History::new(message.content.into()));
275 let ops = message
276 .history
277 .into_iter()
278 .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op)));
279 buffer.apply_ops(ops)?;
280 for set in message.selections {
281 let set = proto::deserialize_selection_set(set);
282 buffer.add_raw_selection_set(set.id, set);
283 }
284 let mut this = Self::build(buffer, file);
285 this.apply_diagnostic_update(
286 Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
287 cx,
288 );
289
290 Ok(this)
291 }
292
293 pub fn to_proto(&self) -> proto::Buffer {
294 proto::Buffer {
295 id: self.remote_id(),
296 content: self.text.base_text().to_string(),
297 history: self
298 .text
299 .history()
300 .map(proto::serialize_edit_operation)
301 .collect(),
302 selections: self
303 .selection_sets()
304 .map(|(_, set)| proto::serialize_selection_set(set))
305 .collect(),
306 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
307 }
308 }
309
310 pub fn with_language(
311 mut self,
312 language: Option<Arc<Language>>,
313 language_server: Option<Arc<LanguageServer>>,
314 cx: &mut ModelContext<Self>,
315 ) -> Self {
316 self.set_language(language, language_server, cx);
317 self
318 }
319
    fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
        let saved_mtime = if let Some(file) = file.as_ref() {
            file.mtime()
        } else {
            UNIX_EPOCH
        };
327
328 Self {
329 saved_mtime,
330 saved_version: buffer.version(),
331 text: buffer,
332 file,
333 syntax_tree: Mutex::new(None),
334 parsing_in_background: false,
335 parse_count: 0,
336 sync_parse_timeout: Duration::from_millis(1),
337 autoindent_requests: Default::default(),
338 pending_autoindent: Default::default(),
339 language: None,
340 diagnostics: Default::default(),
341 diagnostics_update_count: 0,
342 language_server: None,
343 deferred_ops: OperationQueue::new(),
344 #[cfg(test)]
345 operations: Default::default(),
346 }
347 }
348
349 pub fn snapshot(&self) -> BufferSnapshot {
350 BufferSnapshot {
351 text: self.text.snapshot(),
352 tree: self.syntax_tree(),
353 diagnostics: self.diagnostics.clone(),
354 diagnostics_update_count: self.diagnostics_update_count,
355 is_parsing: self.parsing_in_background,
356 language: self.language.clone(),
357 parse_count: self.parse_count,
358 }
359 }
360
361 pub fn file(&self) -> Option<&dyn File> {
362 self.file.as_deref()
363 }
364
365 pub fn save(
366 &mut self,
367 cx: &mut ModelContext<Self>,
368 ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
369 let file = self
370 .file
371 .as_ref()
372 .ok_or_else(|| anyhow!("buffer has no file"))?;
373 let text = self.as_rope().clone();
374 let version = self.version();
375 let save = file.save(self.remote_id(), text, version, cx.as_mut());
376 Ok(cx.spawn(|this, mut cx| async move {
377 let (version, mtime) = save.await?;
378 this.update(&mut cx, |this, cx| {
379 this.did_save(version.clone(), mtime, None, cx);
380 });
381 Ok((version, mtime))
382 }))
383 }
384
385 pub fn set_language(
386 &mut self,
387 language: Option<Arc<Language>>,
388 language_server: Option<Arc<lsp::LanguageServer>>,
389 cx: &mut ModelContext<Self>,
390 ) {
391 self.language = language;
392 self.language_server = if let Some(server) = language_server {
393 let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
394 Some(LanguageServerState {
395 latest_snapshot: latest_snapshot_tx,
396 pending_snapshots: Default::default(),
397 next_version: 0,
398 server: server.clone(),
399 _maintain_server: cx.background().spawn(
400 async move {
401 let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
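                        // The first snapshot opens the document with its full text via
                        // `textDocument/didOpen`; subsequent snapshots are forwarded as
                        // incremental `textDocument/didChange` events built from
                        // `edits_since` against the previous snapshot.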
402 while let Some(snapshot) = latest_snapshot_rx.recv().await {
403 if let Some(snapshot) = snapshot {
404 let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
405 if let Some(prev_snapshot) = prev_snapshot {
406 let changes = lsp::DidChangeTextDocumentParams {
407 text_document: lsp::VersionedTextDocumentIdentifier::new(
408 uri,
409 snapshot.version as i32,
410 ),
411 content_changes: snapshot
412 .buffer_snapshot
413 .edits_since::<(PointUtf16, usize)>(
414 prev_snapshot.buffer_snapshot.version(),
415 )
416 .map(|edit| {
417 let edit_start = edit.new.start.0;
418 let edit_end = edit_start
419 + (edit.old.end.0 - edit.old.start.0);
420 let new_text = snapshot
421 .buffer_snapshot
422 .text_for_range(
423 edit.new.start.1..edit.new.end.1,
424 )
425 .collect();
426 lsp::TextDocumentContentChangeEvent {
427 range: Some(lsp::Range::new(
428 lsp::Position::new(
429 edit_start.row,
430 edit_start.column,
431 ),
432 lsp::Position::new(
433 edit_end.row,
434 edit_end.column,
435 ),
436 )),
437 range_length: None,
438 text: new_text,
439 }
440 })
441 .collect(),
442 };
443 server
444 .notify::<lsp::notification::DidChangeTextDocument>(changes)
445 .await?;
446 } else {
447 server
448 .notify::<lsp::notification::DidOpenTextDocument>(
449 lsp::DidOpenTextDocumentParams {
450 text_document: lsp::TextDocumentItem::new(
451 uri,
452 Default::default(),
453 snapshot.version as i32,
454 snapshot.buffer_snapshot.text().to_string(),
455 ),
456 },
457 )
458 .await?;
459 }
460
461 prev_snapshot = Some(snapshot);
462 }
463 }
464 Ok(())
465 }
466 .log_err(),
467 ),
468 })
469 } else {
470 None
471 };
472
473 self.reparse(cx);
474 self.update_language_server();
475 }
476
477 pub fn did_save(
478 &mut self,
479 version: clock::Global,
480 mtime: SystemTime,
481 new_file: Option<Box<dyn File>>,
482 cx: &mut ModelContext<Self>,
483 ) {
484 self.saved_mtime = mtime;
485 self.saved_version = version;
486 if let Some(new_file) = new_file {
487 self.file = Some(new_file);
488 }
489 if let Some(state) = &self.language_server {
490 cx.background()
491 .spawn(
492 state
493 .server
494 .notify::<lsp::notification::DidSaveTextDocument>(
495 lsp::DidSaveTextDocumentParams {
496 text_document: lsp::TextDocumentIdentifier {
497 uri: lsp::Url::from_file_path(
498 self.file.as_ref().unwrap().abs_path().unwrap(),
499 )
500 .unwrap(),
501 },
502 text: None,
503 },
504 ),
505 )
506 .detach()
507 }
508 cx.emit(Event::Saved);
509 }
510
511 pub fn file_updated(
512 &mut self,
513 new_file: Box<dyn File>,
514 cx: &mut ModelContext<Self>,
515 ) -> Option<Task<()>> {
516 let old_file = self.file.as_ref()?;
517 let mut file_changed = false;
518 let mut task = None;
519
520 if new_file.path() != old_file.path() {
521 file_changed = true;
522 }
523
524 if new_file.is_deleted() {
525 if !old_file.is_deleted() {
526 file_changed = true;
527 if !self.is_dirty() {
528 cx.emit(Event::Dirtied);
529 }
530 }
531 } else {
532 let new_mtime = new_file.mtime();
533 if new_mtime != old_file.mtime() {
534 file_changed = true;
535
536 if !self.is_dirty() {
537 task = Some(cx.spawn(|this, mut cx| {
538 async move {
539 let new_text = this.read_with(&cx, |this, cx| {
540 this.file.as_ref().and_then(|file| file.load_local(cx))
541 });
542 if let Some(new_text) = new_text {
543 let new_text = new_text.await?;
544 let diff = this
545 .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
546 .await;
547 this.update(&mut cx, |this, cx| {
548 if this.apply_diff(diff, cx) {
549 this.saved_version = this.version();
550 this.saved_mtime = new_mtime;
551 cx.emit(Event::Reloaded);
552 }
553 });
554 }
555 Ok(())
556 }
557 .log_err()
558 .map(drop)
559 }));
560 }
561 }
562 }
563
564 if file_changed {
565 cx.emit(Event::FileHandleChanged);
566 }
567 self.file = Some(new_file);
568 task
569 }
570
571 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
572 cx.emit(Event::Closed);
573 }
574
575 pub fn language(&self) -> Option<&Arc<Language>> {
576 self.language.as_ref()
577 }
578
579 pub fn parse_count(&self) -> usize {
580 self.parse_count
581 }
582
583 pub fn diagnostics_update_count(&self) -> usize {
584 self.diagnostics_update_count
585 }
586
587 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
588 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
589 self.interpolate_tree(syntax_tree);
590 Some(syntax_tree.tree.clone())
591 } else {
592 None
593 }
594 }
595
596 #[cfg(any(test, feature = "test-support"))]
597 pub fn is_parsing(&self) -> bool {
598 self.parsing_in_background
599 }
600
601 #[cfg(test)]
602 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
603 self.sync_parse_timeout = timeout;
604 }
605
606 fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
607 if self.parsing_in_background {
608 return false;
609 }
610
611 if let Some(grammar) = self.grammar().cloned() {
612 let old_tree = self.syntax_tree();
613 let text = self.as_rope().clone();
614 let parsed_version = self.version();
615 let parse_task = cx.background().spawn({
616 let grammar = grammar.clone();
617 async move { Self::parse_text(&text, old_tree, &grammar) }
618 });
619
620 match cx
621 .background()
622 .block_with_timeout(self.sync_parse_timeout, parse_task)
623 {
624 Ok(new_tree) => {
625 self.did_finish_parsing(new_tree, parsed_version, cx);
626 return true;
627 }
628 Err(parse_task) => {
629 self.parsing_in_background = true;
630 cx.spawn(move |this, mut cx| async move {
631 let new_tree = parse_task.await;
632 this.update(&mut cx, move |this, cx| {
633 let grammar_changed = this
634 .grammar()
635 .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
636 let parse_again = this.version.gt(&parsed_version) || grammar_changed;
637 this.parsing_in_background = false;
638 this.did_finish_parsing(new_tree, parsed_version, cx);
639
640 if parse_again && this.reparse(cx) {
641 return;
642 }
643 });
644 })
645 .detach();
646 }
647 }
648 }
649 false
650 }
651
652 fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
653 PARSER.with(|parser| {
654 let mut parser = parser.borrow_mut();
655 parser
656 .set_language(grammar.ts_language)
657 .expect("incompatible grammar");
658 let mut chunks = text.chunks_in_range(0..text.len());
659 let tree = parser
660 .parse_with(
661 &mut move |offset, _| {
662 chunks.seek(offset);
663 chunks.next().unwrap_or("").as_bytes()
664 },
665 old_tree.as_ref(),
666 )
667 .unwrap();
668 tree
669 })
670 }
671
672 fn interpolate_tree(&self, tree: &mut SyntaxTree) {
673 for edit in self.edits_since::<(usize, Point)>(&tree.version) {
674 let (bytes, lines) = edit.flatten();
675 tree.tree.edit(&InputEdit {
676 start_byte: bytes.new.start,
677 old_end_byte: bytes.new.start + bytes.old.len(),
678 new_end_byte: bytes.new.end,
679 start_position: lines.new.start.to_ts_point(),
680 old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
681 .to_ts_point(),
682 new_end_position: lines.new.end.to_ts_point(),
683 });
684 }
685 tree.version = self.version();
686 }
687
688 fn did_finish_parsing(
689 &mut self,
690 tree: Tree,
691 version: clock::Global,
692 cx: &mut ModelContext<Self>,
693 ) {
694 self.parse_count += 1;
695 *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
696 self.request_autoindent(cx);
697 cx.emit(Event::Reparsed);
698 cx.notify();
699 }
700
701 pub fn update_diagnostics(
702 &mut self,
703 version: Option<i32>,
704 mut diagnostics: Vec<lsp::Diagnostic>,
705 cx: &mut ModelContext<Self>,
706 ) -> Result<Operation> {
707 diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
708
709 let version = version.map(|version| version as usize);
710 let content = if let Some(version) = version {
711 let language_server = self.language_server.as_mut().unwrap();
712 let snapshot = language_server
713 .pending_snapshots
714 .get(&version)
715 .ok_or_else(|| anyhow!("missing snapshot"))?;
716 &snapshot.buffer_snapshot
717 } else {
718 self.deref()
719 };
720 let abs_path = self.file.as_ref().and_then(|f| f.abs_path());
721
722 let empty_set = HashSet::new();
723 let disk_based_sources = self
724 .language
725 .as_ref()
726 .and_then(|language| language.disk_based_diagnostic_sources())
727 .unwrap_or(&empty_set);
728
729 let mut edits_since_save = content
730 .edits_since::<PointUtf16>(&self.saved_version)
731 .peekable();
732 let mut last_edit_old_end = PointUtf16::zero();
733 let mut last_edit_new_end = PointUtf16::zero();
734 let mut group_ids_by_diagnostic_range = HashMap::new();
735 let mut diagnostics_by_group_id = HashMap::new();
736 let mut next_group_id = 0;
737 'outer: for diagnostic in &diagnostics {
738 let mut start = diagnostic.range.start.to_point_utf16();
739 let mut end = diagnostic.range.end.to_point_utf16();
740 let source = diagnostic.source.as_ref();
741 let code = diagnostic.code.as_ref();
742 let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
743 .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
744 .copied()
745 .unwrap_or_else(|| {
746 let group_id = post_inc(&mut next_group_id);
747 for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
748 group_ids_by_diagnostic_range.insert((source, code, range), group_id);
749 }
750 group_id
751 });
752
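            // Diagnostics from disk-based sources are based on the file's last saved
            // contents, so translate their positions through the edits made since
            // `saved_version` and skip any diagnostic that overlaps one of those edits.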
753 if diagnostic
754 .source
755 .as_ref()
756 .map_or(false, |source| disk_based_sources.contains(source))
757 {
758 while let Some(edit) = edits_since_save.peek() {
759 if edit.old.end <= start {
760 last_edit_old_end = edit.old.end;
761 last_edit_new_end = edit.new.end;
762 edits_since_save.next();
763 } else if edit.old.start <= end && edit.old.end >= start {
764 continue 'outer;
765 } else {
766 break;
767 }
768 }
769
770 start = last_edit_new_end + (start - last_edit_old_end);
771 end = last_edit_new_end + (end - last_edit_old_end);
772 }
773
774 let mut range = content.clip_point_utf16(start, Bias::Left)
775 ..content.clip_point_utf16(end, Bias::Right);
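            // A diagnostic with an empty range is widened by one character (to the
            // right when possible, otherwise to the left) so that it still spans
            // some text.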
776 if range.start == range.end {
777 range.end.column += 1;
778 range.end = content.clip_point_utf16(range.end, Bias::Right);
779 if range.start == range.end && range.end.column > 0 {
780 range.start.column -= 1;
781 range.start = content.clip_point_utf16(range.start, Bias::Left);
782 }
783 }
784
785 diagnostics_by_group_id
786 .entry(group_id)
787 .or_insert(Vec::new())
788 .push(DiagnosticEntry {
789 range,
790 diagnostic: Diagnostic {
791 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
792 message: diagnostic.message.clone(),
793 group_id,
794 is_primary: false,
795 },
796 });
797 }
798
799 drop(edits_since_save);
800 let new_diagnostics = DiagnosticSet::new(
801 diagnostics_by_group_id
802 .into_values()
803 .flat_map(|mut diagnostics| {
804 let primary = diagnostics
805 .iter_mut()
806 .min_by_key(|entry| entry.diagnostic.severity)
807 .unwrap();
808 primary.diagnostic.is_primary = true;
809 diagnostics
810 }),
811 content,
812 );
813 self.diagnostics = new_diagnostics;
814
815 if let Some(version) = version {
816 let language_server = self.language_server.as_mut().unwrap();
817 let versions_to_delete = language_server
818 .pending_snapshots
819 .range(..version)
820 .map(|(v, _)| *v)
821 .collect::<Vec<_>>();
822 for version in versions_to_delete {
823 language_server.pending_snapshots.remove(&version);
824 }
825 }
826
827 self.diagnostics_update_count += 1;
828 cx.notify();
829 cx.emit(Event::DiagnosticsUpdated);
830 Ok(Operation::UpdateDiagnostics {
831 diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
832 lamport_timestamp: self.lamport_timestamp(),
833 })
834 }
835
836 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
837 if let Some(indent_columns) = self.compute_autoindents() {
838 let indent_columns = cx.background().spawn(indent_columns);
839 match cx
840 .background()
841 .block_with_timeout(Duration::from_micros(500), indent_columns)
842 {
843 Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
844 Err(indent_columns) => {
845 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
846 let indent_columns = indent_columns.await;
847 this.update(&mut cx, |this, cx| {
848 this.apply_autoindents(indent_columns, cx);
849 });
850 }));
851 }
852 }
853 }
854 }
855
856 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
857 let max_rows_between_yields = 100;
858 let snapshot = self.snapshot();
859 if snapshot.language.is_none()
860 || snapshot.tree.is_none()
861 || self.autoindent_requests.is_empty()
862 {
863 return None;
864 }
865
866 let autoindent_requests = self.autoindent_requests.clone();
867 Some(async move {
868 let mut indent_columns = BTreeMap::new();
869 for request in autoindent_requests {
870 let old_to_new_rows = request
871 .edited
872 .iter()
873 .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
874 .zip(
875 request
876 .edited
877 .iter()
878 .map(|anchor| anchor.summary::<Point>(&snapshot).row),
879 )
880 .collect::<BTreeMap<u32, u32>>();
881
882 let mut old_suggestions = HashMap::<u32, u32>::default();
883 let old_edited_ranges =
884 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
885 for old_edited_range in old_edited_ranges {
886 let suggestions = request
887 .before_edit
888 .suggest_autoindents(old_edited_range.clone())
889 .into_iter()
890 .flatten();
891 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
892 let indentation_basis = old_to_new_rows
893 .get(&suggestion.basis_row)
894 .and_then(|from_row| old_suggestions.get(from_row).copied())
895 .unwrap_or_else(|| {
896 request
897 .before_edit
898 .indent_column_for_line(suggestion.basis_row)
899 });
900 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
901 old_suggestions.insert(
902 *old_to_new_rows.get(&old_row).unwrap(),
903 indentation_basis + delta,
904 );
905 }
906 yield_now().await;
907 }
908
                // At this point, `old_suggestions` contains the suggested indentation
                // for every edited line, computed against the buffer's state before the
                // edit but keyed by each line's row after the edits were applied.
911 let new_edited_row_ranges =
912 contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
913 for new_edited_row_range in new_edited_row_ranges {
914 let suggestions = snapshot
915 .suggest_autoindents(new_edited_row_range.clone())
916 .into_iter()
917 .flatten();
918 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
919 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
920 let new_indentation = indent_columns
921 .get(&suggestion.basis_row)
922 .copied()
923 .unwrap_or_else(|| {
924 snapshot.indent_column_for_line(suggestion.basis_row)
925 })
926 + delta;
927 if old_suggestions
928 .get(&new_row)
929 .map_or(true, |old_indentation| new_indentation != *old_indentation)
930 {
931 indent_columns.insert(new_row, new_indentation);
932 }
933 }
934 yield_now().await;
935 }
936
937 if let Some(inserted) = request.inserted.as_ref() {
938 let inserted_row_ranges = contiguous_ranges(
939 inserted
940 .iter()
941 .map(|range| range.to_point(&snapshot))
942 .flat_map(|range| range.start.row..range.end.row + 1),
943 max_rows_between_yields,
944 );
945 for inserted_row_range in inserted_row_ranges {
946 let suggestions = snapshot
947 .suggest_autoindents(inserted_row_range.clone())
948 .into_iter()
949 .flatten();
950 for (row, suggestion) in inserted_row_range.zip(suggestions) {
951 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
952 let new_indentation = indent_columns
953 .get(&suggestion.basis_row)
954 .copied()
955 .unwrap_or_else(|| {
956 snapshot.indent_column_for_line(suggestion.basis_row)
957 })
958 + delta;
959 indent_columns.insert(row, new_indentation);
960 }
961 yield_now().await;
962 }
963 }
964 }
965 indent_columns
966 })
967 }
968
969 fn apply_autoindents(
970 &mut self,
971 indent_columns: BTreeMap<u32, u32>,
972 cx: &mut ModelContext<Self>,
973 ) {
974 let selection_set_ids = self
975 .autoindent_requests
976 .drain(..)
977 .flat_map(|req| req.selection_set_ids.clone())
978 .collect::<HashSet<_>>();
979
980 self.start_transaction(selection_set_ids.iter().copied());
981 for (row, indent_column) in &indent_columns {
982 self.set_indent_column_for_line(*row, *indent_column, cx);
983 }
984
985 for selection_set_id in &selection_set_ids {
986 if let Ok(set) = self.selection_set(*selection_set_id) {
987 let new_selections = set
988 .selections::<Point>(&*self)
989 .map(|selection| {
990 if selection.start.column == 0 {
991 let delta = Point::new(
992 0,
993 indent_columns
994 .get(&selection.start.row)
995 .copied()
996 .unwrap_or(0),
997 );
998 if delta.column > 0 {
999 return Selection {
1000 id: selection.id,
1001 goal: selection.goal,
1002 reversed: selection.reversed,
1003 start: selection.start + delta,
1004 end: selection.end + delta,
1005 };
1006 }
1007 }
1008 selection
1009 })
1010 .collect::<Vec<_>>();
1011 self.update_selection_set(*selection_set_id, &new_selections, cx)
1012 .unwrap();
1013 }
1014 }
1015
1016 self.end_transaction(selection_set_ids.iter().copied(), cx);
1017 }
1018
1019 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
1020 let current_column = self.indent_column_for_line(row);
1021 if column > current_column {
1022 let offset = Point::new(row, 0).to_offset(&*self);
1023 self.edit(
1024 [offset..offset],
1025 " ".repeat((column - current_column) as usize),
1026 cx,
1027 );
1028 } else if column < current_column {
1029 self.edit(
1030 [Point::new(row, 0)..Point::new(row, current_column - column)],
1031 "",
1032 cx,
1033 );
1034 }
1035 }
1036
1037 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
1038 // TODO: it would be nice to not allocate here.
1039 let old_text = self.text();
1040 let base_version = self.version();
1041 cx.background().spawn(async move {
1042 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
1043 .iter_all_changes()
1044 .map(|c| (c.tag(), c.value().len()))
1045 .collect::<Vec<_>>();
1046 Diff {
1047 base_version,
1048 new_text,
1049 changes,
1050 }
1051 })
1052 }
1053
1054 pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
1055 if self.version == diff.base_version {
1056 self.start_transaction(None);
1057 let mut offset = 0;
1058 for (tag, len) in diff.changes {
1059 let range = offset..(offset + len);
1060 match tag {
1061 ChangeTag::Equal => offset += len,
1062 ChangeTag::Delete => self.edit(Some(range), "", cx),
1063 ChangeTag::Insert => {
1064 self.edit(Some(offset..offset), &diff.new_text[range], cx);
1065 offset += len;
1066 }
1067 }
1068 }
1069 self.end_transaction(None, cx);
1070 true
1071 } else {
1072 false
1073 }
1074 }
1075
1076 pub fn is_dirty(&self) -> bool {
1077 !self.saved_version.ge(&self.version)
1078 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1079 }
1080
1081 pub fn has_conflict(&self) -> bool {
1082 !self.saved_version.ge(&self.version)
1083 && self
1084 .file
1085 .as_ref()
1086 .map_or(false, |file| file.mtime() > self.saved_mtime)
1087 }
1088
1089 pub fn subscribe(&mut self) -> Subscription {
1090 self.text.subscribe()
1091 }
1092
1093 pub fn start_transaction(
1094 &mut self,
1095 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1096 ) -> Option<TransactionId> {
1097 self.start_transaction_at(selection_set_ids, Instant::now())
1098 }
1099
1100 pub(crate) fn start_transaction_at(
1101 &mut self,
1102 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1103 now: Instant,
1104 ) -> Option<TransactionId> {
1105 self.text.start_transaction_at(selection_set_ids, now)
1106 }
1107
1108 pub fn end_transaction(
1109 &mut self,
1110 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1111 cx: &mut ModelContext<Self>,
1112 ) -> Option<TransactionId> {
1113 self.end_transaction_at(selection_set_ids, Instant::now(), cx)
1114 }
1115
1116 pub(crate) fn end_transaction_at(
1117 &mut self,
1118 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1119 now: Instant,
1120 cx: &mut ModelContext<Self>,
1121 ) -> Option<TransactionId> {
1122 if let Some((transaction_id, start_version)) =
1123 self.text.end_transaction_at(selection_set_ids, now)
1124 {
1125 let was_dirty = start_version != self.saved_version;
1126 self.did_edit(&start_version, was_dirty, cx);
1127 Some(transaction_id)
1128 } else {
1129 None
1130 }
1131 }
1132
1133 fn update_language_server(&mut self) {
1134 let language_server = if let Some(language_server) = self.language_server.as_mut() {
1135 language_server
1136 } else {
1137 return;
1138 };
1139 let abs_path = self
1140 .file
1141 .as_ref()
1142 .map_or(Path::new("/").to_path_buf(), |file| {
1143 file.abs_path().unwrap()
1144 });
1145
1146 let version = post_inc(&mut language_server.next_version);
1147 let snapshot = LanguageServerSnapshot {
1148 buffer_snapshot: self.text.snapshot(),
1149 version,
1150 path: Arc::from(abs_path),
1151 };
1152 language_server
1153 .pending_snapshots
1154 .insert(version, snapshot.clone());
1155 let _ = language_server
1156 .latest_snapshot
1157 .blocking_send(Some(snapshot));
1158 }
1159
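    /// Replaces each of the given ranges with `new_text`, skipping empty ranges when
    /// `new_text` is empty and coalescing ranges that touch or overlap.
    ///
    /// A minimal usage sketch (marked `ignore`; assumes a `cx: &mut ModelContext<Buffer>`
    /// provided by the surrounding gpui model):
    ///
    /// ```ignore
    /// buffer.edit(vec![0..0], "// preamble\n", cx);
    /// ```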
1160 pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
1161 where
1162 I: IntoIterator<Item = Range<S>>,
1163 S: ToOffset,
1164 T: Into<String>,
1165 {
1166 self.edit_internal(ranges_iter, new_text, false, cx)
1167 }
1168
1169 pub fn edit_with_autoindent<I, S, T>(
1170 &mut self,
1171 ranges_iter: I,
1172 new_text: T,
1173 cx: &mut ModelContext<Self>,
1174 ) where
1175 I: IntoIterator<Item = Range<S>>,
1176 S: ToOffset,
1177 T: Into<String>,
1178 {
1179 self.edit_internal(ranges_iter, new_text, true, cx)
1180 }
1181
1182 pub fn edit_internal<I, S, T>(
1183 &mut self,
1184 ranges_iter: I,
1185 new_text: T,
1186 autoindent: bool,
1187 cx: &mut ModelContext<Self>,
1188 ) where
1189 I: IntoIterator<Item = Range<S>>,
1190 S: ToOffset,
1191 T: Into<String>,
1192 {
1193 let new_text = new_text.into();
1194
1195 // Skip invalid ranges and coalesce contiguous ones.
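        // For example, with a non-empty `new_text`, the ranges 1..3, 3..6, and 9..9
        // become 1..6 and 9..9: touching or overlapping ranges are merged.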
1196 let mut ranges: Vec<Range<usize>> = Vec::new();
1197 for range in ranges_iter {
1198 let range = range.start.to_offset(self)..range.end.to_offset(self);
1199 if !new_text.is_empty() || !range.is_empty() {
1200 if let Some(prev_range) = ranges.last_mut() {
1201 if prev_range.end >= range.start {
1202 prev_range.end = cmp::max(prev_range.end, range.end);
1203 } else {
1204 ranges.push(range);
1205 }
1206 } else {
1207 ranges.push(range);
1208 }
1209 }
1210 }
1211 if ranges.is_empty() {
1212 return;
1213 }
1214
1215 self.start_transaction(None);
1216 self.pending_autoindent.take();
1217 let autoindent_request = if autoindent && self.language.is_some() {
1218 let before_edit = self.snapshot();
1219 let edited = ranges
1220 .iter()
1221 .filter_map(|range| {
1222 let start = range.start.to_point(self);
1223 if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
1224 None
1225 } else {
1226 Some(self.anchor_before(range.start))
1227 }
1228 })
1229 .collect();
1230 Some((before_edit, edited))
1231 } else {
1232 None
1233 };
1234
1235 let first_newline_ix = new_text.find('\n');
1236 let new_text_len = new_text.len();
1237
1238 let edit = self.text.edit(ranges.iter().cloned(), new_text);
1239
1240 if let Some((before_edit, edited)) = autoindent_request {
1241 let mut inserted = None;
1242 if let Some(first_newline_ix) = first_newline_ix {
1243 let mut delta = 0isize;
1244 inserted = Some(
1245 ranges
1246 .iter()
1247 .map(|range| {
1248 let start =
1249 (delta + range.start as isize) as usize + first_newline_ix + 1;
1250 let end = (delta + range.start as isize) as usize + new_text_len;
                            // Subsequent ranges shift by the net change in length:
                            // the inserted text minus the replaced range.
                            delta += (range.start as isize - range.end as isize)
                                + new_text_len as isize;
1253 self.anchor_before(start)..self.anchor_after(end)
1254 })
1255 .collect(),
1256 );
1257 }
1258
1259 let selection_set_ids = self
1260 .text
1261 .peek_undo_stack()
1262 .unwrap()
1263 .starting_selection_set_ids()
1264 .collect();
1265 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1266 selection_set_ids,
1267 before_edit,
1268 edited,
1269 inserted,
1270 }));
1271 }
1272
1273 self.end_transaction(None, cx);
1274 self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
1275 }
1276
1277 fn did_edit(
1278 &mut self,
1279 old_version: &clock::Global,
1280 was_dirty: bool,
1281 cx: &mut ModelContext<Self>,
1282 ) {
1283 if self.edits_since::<usize>(old_version).next().is_none() {
1284 return;
1285 }
1286
1287 self.reparse(cx);
1288 self.update_language_server();
1289
1290 cx.emit(Event::Edited);
1291 if !was_dirty {
1292 cx.emit(Event::Dirtied);
1293 }
1294 cx.notify();
1295 }
1296
1297 fn grammar(&self) -> Option<&Arc<Grammar>> {
1298 self.language.as_ref().and_then(|l| l.grammar.as_ref())
1299 }
1300
1301 pub fn add_selection_set<T: ToOffset>(
1302 &mut self,
1303 selections: &[Selection<T>],
1304 cx: &mut ModelContext<Self>,
1305 ) -> SelectionSetId {
1306 let operation = self.text.add_selection_set(selections);
1307 if let text::Operation::UpdateSelections { set_id, .. } = &operation {
1308 let set_id = *set_id;
1309 cx.notify();
1310 self.send_operation(Operation::Buffer(operation), cx);
1311 set_id
1312 } else {
1313 unreachable!()
1314 }
1315 }
1316
1317 pub fn update_selection_set<T: ToOffset>(
1318 &mut self,
1319 set_id: SelectionSetId,
1320 selections: &[Selection<T>],
1321 cx: &mut ModelContext<Self>,
1322 ) -> Result<()> {
1323 let operation = self.text.update_selection_set(set_id, selections)?;
1324 cx.notify();
1325 self.send_operation(Operation::Buffer(operation), cx);
1326 Ok(())
1327 }
1328
1329 pub fn set_active_selection_set(
1330 &mut self,
1331 set_id: Option<SelectionSetId>,
1332 cx: &mut ModelContext<Self>,
1333 ) -> Result<()> {
1334 let operation = self.text.set_active_selection_set(set_id)?;
1335 self.send_operation(Operation::Buffer(operation), cx);
1336 Ok(())
1337 }
1338
1339 pub fn remove_selection_set(
1340 &mut self,
1341 set_id: SelectionSetId,
1342 cx: &mut ModelContext<Self>,
1343 ) -> Result<()> {
1344 let operation = self.text.remove_selection_set(set_id)?;
1345 cx.notify();
1346 self.send_operation(Operation::Buffer(operation), cx);
1347 Ok(())
1348 }
1349
1350 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1351 &mut self,
1352 ops: I,
1353 cx: &mut ModelContext<Self>,
1354 ) -> Result<()> {
1355 self.pending_autoindent.take();
1356 let was_dirty = self.is_dirty();
1357 let old_version = self.version.clone();
1358 let mut deferred_ops = Vec::new();
1359 let buffer_ops = ops
1360 .into_iter()
1361 .filter_map(|op| match op {
1362 Operation::Buffer(op) => Some(op),
1363 _ => {
1364 if self.can_apply_op(&op) {
1365 self.apply_op(op, cx);
1366 } else {
1367 deferred_ops.push(op);
1368 }
1369 None
1370 }
1371 })
1372 .collect::<Vec<_>>();
1373 self.text.apply_ops(buffer_ops)?;
1374 self.flush_deferred_ops(cx);
1375 self.did_edit(&old_version, was_dirty, cx);
        // Notify even if the buffer wasn't edited, since the operations may include
        // selection updates.
1378 cx.notify();
1379 Ok(())
1380 }
1381
1382 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1383 let mut deferred_ops = Vec::new();
1384 for op in self.deferred_ops.drain().iter().cloned() {
1385 if self.can_apply_op(&op) {
1386 self.apply_op(op, cx);
1387 } else {
1388 deferred_ops.push(op);
1389 }
1390 }
1391 self.deferred_ops.insert(deferred_ops);
1392 }
1393
1394 fn can_apply_op(&self, operation: &Operation) -> bool {
1395 match operation {
1396 Operation::Buffer(_) => {
1397 unreachable!("buffer operations should never be applied at this layer")
1398 }
1399 Operation::UpdateDiagnostics { diagnostics, .. } => {
1400 diagnostics.iter().all(|diagnostic| {
1401 self.text.can_resolve(&diagnostic.range.start)
1402 && self.text.can_resolve(&diagnostic.range.end)
1403 })
1404 }
1405 }
1406 }
1407
1408 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1409 match operation {
1410 Operation::Buffer(_) => {
1411 unreachable!("buffer operations should never be applied at this layer")
1412 }
1413 Operation::UpdateDiagnostics { diagnostics, .. } => {
1414 self.apply_diagnostic_update(diagnostics, cx);
1415 }
1416 }
1417 }
1418
1419 fn apply_diagnostic_update(
1420 &mut self,
1421 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
1422 cx: &mut ModelContext<Self>,
1423 ) {
1424 self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
1425 self.diagnostics_update_count += 1;
1426 cx.notify();
1427 }
1428
1429 #[cfg(not(test))]
1430 pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1431 if let Some(file) = &self.file {
1432 file.buffer_updated(self.remote_id(), operation, cx.as_mut());
1433 }
1434 }
1435
1436 #[cfg(test)]
1437 pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
1438 self.operations.push(operation);
1439 }
1440
1441 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1442 self.text.remove_peer(replica_id);
1443 cx.notify();
1444 }
1445
1446 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1447 let was_dirty = self.is_dirty();
1448 let old_version = self.version.clone();
1449
1450 if let Some((transaction_id, operations)) = self.text.undo() {
1451 for operation in operations {
1452 self.send_operation(Operation::Buffer(operation), cx);
1453 }
1454 self.did_edit(&old_version, was_dirty, cx);
1455 Some(transaction_id)
1456 } else {
1457 None
1458 }
1459 }
1460
1461 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1462 let was_dirty = self.is_dirty();
1463 let old_version = self.version.clone();
1464
1465 if let Some((transaction_id, operations)) = self.text.redo() {
1466 for operation in operations {
1467 self.send_operation(Operation::Buffer(operation), cx);
1468 }
1469 self.did_edit(&old_version, was_dirty, cx);
1470 Some(transaction_id)
1471 } else {
1472 None
1473 }
1474 }
1475}
1476
1477#[cfg(any(test, feature = "test-support"))]
1478impl Buffer {
1479 pub fn randomly_edit<T>(
1480 &mut self,
1481 rng: &mut T,
1482 old_range_count: usize,
1483 cx: &mut ModelContext<Self>,
1484 ) where
1485 T: rand::Rng,
1486 {
1487 self.start_transaction(None);
1488 self.text.randomly_edit(rng, old_range_count);
1489 self.end_transaction(None, cx);
1490 }
1491
1492 pub fn randomly_mutate<T>(&mut self, rng: &mut T, cx: &mut ModelContext<Self>)
1493 where
1494 T: rand::Rng,
1495 {
1496 self.start_transaction(None);
1497 self.text.randomly_mutate(rng);
1498 self.end_transaction(None, cx);
1499 }
1500}
1501
1502impl Entity for Buffer {
1503 type Event = Event;
1504
1505 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
1506 if let Some(file) = self.file.as_ref() {
1507 file.buffer_removed(self.remote_id(), cx);
1508 }
1509 }
1510}
1511
1512impl Deref for Buffer {
1513 type Target = TextBuffer;
1514
1515 fn deref(&self) -> &Self::Target {
1516 &self.text
1517 }
1518}
1519
1520impl BufferSnapshot {
1521 fn suggest_autoindents<'a>(
1522 &'a self,
1523 row_range: Range<u32>,
1524 ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
1525 let mut query_cursor = QueryCursorHandle::new();
1526 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1527 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1528
1529 // Get the "indentation ranges" that intersect this row range.
1530 let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
1531 let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
1532 query_cursor.set_point_range(
1533 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
1534 ..Point::new(row_range.end, 0).to_ts_point(),
1535 );
1536 let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
1537 for mat in query_cursor.matches(
1538 &grammar.indents_query,
1539 tree.root_node(),
1540 TextProvider(self.as_rope()),
1541 ) {
1542 let mut node_kind = "";
1543 let mut start: Option<Point> = None;
1544 let mut end: Option<Point> = None;
1545 for capture in mat.captures {
1546 if Some(capture.index) == indent_capture_ix {
1547 node_kind = capture.node.kind();
1548 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1549 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1550 } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position()));
1552 }
1553 }
1554
1555 if let Some((start, end)) = start.zip(end) {
1556 if start.row == end.row {
1557 continue;
1558 }
1559
1560 let range = start..end;
1561 match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
1562 Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
1563 Ok(ix) => {
1564 let prev_range = &mut indentation_ranges[ix];
1565 prev_range.0.end = prev_range.0.end.max(range.end);
1566 }
1567 }
1568 }
1569 }
1570
1571 let mut prev_row = prev_non_blank_row.unwrap_or(0);
1572 Some(row_range.map(move |row| {
1573 let row_start = Point::new(row, self.indent_column_for_line(row));
1574
1575 let mut indent_from_prev_row = false;
1576 let mut outdent_to_row = u32::MAX;
1577 for (range, _node_kind) in &indentation_ranges {
1578 if range.start.row >= row {
1579 break;
1580 }
1581
1582 if range.start.row == prev_row && range.end > row_start {
1583 indent_from_prev_row = true;
1584 }
1585 if range.end.row >= prev_row && range.end <= row_start {
1586 outdent_to_row = outdent_to_row.min(range.start.row);
1587 }
1588 }
1589
1590 let suggestion = if outdent_to_row == prev_row {
1591 IndentSuggestion {
1592 basis_row: prev_row,
1593 indent: false,
1594 }
1595 } else if indent_from_prev_row {
1596 IndentSuggestion {
1597 basis_row: prev_row,
1598 indent: true,
1599 }
1600 } else if outdent_to_row < prev_row {
1601 IndentSuggestion {
1602 basis_row: outdent_to_row,
1603 indent: false,
1604 }
1605 } else {
1606 IndentSuggestion {
1607 basis_row: prev_row,
1608 indent: false,
1609 }
1610 };
1611
1612 prev_row = row;
1613 suggestion
1614 }))
1615 } else {
1616 None
1617 }
1618 }
1619
1620 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
1621 while row > 0 {
1622 row -= 1;
1623 if !self.is_line_blank(row) {
1624 return Some(row);
1625 }
1626 }
1627 None
1628 }
1629
1630 pub fn chunks<'a, T: ToOffset>(
1631 &'a self,
1632 range: Range<T>,
1633 theme: Option<&'a SyntaxTheme>,
1634 ) -> BufferChunks<'a> {
1635 let range = range.start.to_offset(self)..range.end.to_offset(self);
1636
1637 let mut highlights = None;
1638 let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
1639 if let Some(theme) = theme {
1640 for entry in self
1641 .diagnostics
1642 .range::<_, usize>(range.clone(), self, true)
1643 {
1644 diagnostic_endpoints.push(DiagnosticEndpoint {
1645 offset: entry.range.start,
1646 is_start: true,
1647 severity: entry.diagnostic.severity,
1648 });
1649 diagnostic_endpoints.push(DiagnosticEndpoint {
1650 offset: entry.range.end,
1651 is_start: false,
1652 severity: entry.diagnostic.severity,
1653 });
1654 }
1655 diagnostic_endpoints
1656 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
1657
1658 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1659 let mut query_cursor = QueryCursorHandle::new();
1660
1661 // TODO - add a Tree-sitter API to remove the need for this.
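                // The transmute only extends the cursor's lifetime so that `captures`
                // can borrow from it; the cursor itself stays alive at least as long,
                // held in `BufferChunkHighlights::_query_cursor`.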
1662 let cursor = unsafe {
1663 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
1664 };
1665 let captures = cursor.set_byte_range(range.clone()).captures(
1666 &grammar.highlights_query,
1667 tree.root_node(),
1668 TextProvider(self.text.as_rope()),
1669 );
1670 highlights = Some(BufferChunkHighlights {
1671 captures,
1672 next_capture: None,
1673 stack: Default::default(),
1674 highlight_map: grammar.highlight_map(),
1675 _query_cursor: query_cursor,
1676 theme,
1677 })
1678 }
1679 }
1680
1681 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
1682 let chunks = self.text.as_rope().chunks_in_range(range.clone());
1683
1684 BufferChunks {
1685 range,
1686 chunks,
1687 diagnostic_endpoints,
1688 error_depth: 0,
1689 warning_depth: 0,
1690 information_depth: 0,
1691 hint_depth: 0,
1692 highlights,
1693 }
1694 }
1695
1696 pub fn language(&self) -> Option<&Arc<Language>> {
1697 self.language.as_ref()
1698 }
1699
1700 fn grammar(&self) -> Option<&Arc<Grammar>> {
1701 self.language
1702 .as_ref()
1703 .and_then(|language| language.grammar.as_ref())
1704 }
1705
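    /// Returns the byte range of the smallest syntax node that contains `range` and
    /// is strictly larger than it, if a syntax tree is available.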
1706 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
1707 if let Some(tree) = self.tree.as_ref() {
1708 let root = tree.root_node();
1709 let range = range.start.to_offset(self)..range.end.to_offset(self);
1710 let mut node = root.descendant_for_byte_range(range.start, range.end);
1711 while node.map_or(false, |n| n.byte_range() == range) {
1712 node = node.unwrap().parent();
1713 }
1714 node.map(|n| n.byte_range())
1715 } else {
1716 None
1717 }
1718 }
1719
1720 pub fn enclosing_bracket_ranges<T: ToOffset>(
1721 &self,
1722 range: Range<T>,
1723 ) -> Option<(Range<usize>, Range<usize>)> {
1724 let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
1725 let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
1726 let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
1727
1728 // Find bracket pairs that *inclusively* contain the given range.
1729 let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
1730 let mut cursor = QueryCursorHandle::new();
1731 let matches = cursor.set_byte_range(range).matches(
1732 &grammar.brackets_query,
1733 tree.root_node(),
1734 TextProvider(self.as_rope()),
1735 );
1736
1737 // Get the ranges of the innermost pair of brackets.
1738 matches
1739 .filter_map(|mat| {
1740 let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
1741 let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
1742 Some((open.byte_range(), close.byte_range()))
1743 })
1744 .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
1745 }
1746
1747 pub fn diagnostics_in_range<'a, T, O>(
1748 &'a self,
1749 search_range: Range<T>,
1750 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
1751 where
1752 T: 'a + ToOffset,
1753 O: 'a + FromAnchor,
1754 {
1755 self.diagnostics.range(search_range, self, true)
1756 }
1757
1758 pub fn diagnostic_group<'a, O>(
1759 &'a self,
1760 group_id: usize,
1761 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
1762 where
1763 O: 'a + FromAnchor,
1764 {
1765 self.diagnostics.group(group_id, self)
1766 }
1767
1768 pub fn diagnostics_update_count(&self) -> usize {
1769 self.diagnostics_update_count
1770 }
1771
1772 pub fn parse_count(&self) -> usize {
1773 self.parse_count
1774 }
1775}
1776
1777impl Clone for BufferSnapshot {
1778 fn clone(&self) -> Self {
1779 Self {
1780 text: self.text.clone(),
1781 tree: self.tree.clone(),
1782 diagnostics: self.diagnostics.clone(),
1783 diagnostics_update_count: self.diagnostics_update_count,
1784 is_parsing: self.is_parsing,
1785 language: self.language.clone(),
1786 parse_count: self.parse_count,
1787 }
1788 }
1789}
1790
1791impl Deref for BufferSnapshot {
1792 type Target = text::BufferSnapshot;
1793
1794 fn deref(&self) -> &Self::Target {
1795 &self.text
1796 }
1797}
1798
1799impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
1800 type I = ByteChunks<'a>;
1801
1802 fn text(&mut self, node: tree_sitter::Node) -> Self::I {
1803 ByteChunks(self.0.chunks_in_range(node.byte_range()))
1804 }
1805}
1806
1807struct ByteChunks<'a>(rope::Chunks<'a>);
1808
1809impl<'a> Iterator for ByteChunks<'a> {
1810 type Item = &'a [u8];
1811
1812 fn next(&mut self) -> Option<Self::Item> {
1813 self.0.next().map(str::as_bytes)
1814 }
1815}
1816
1817unsafe impl<'a> Send for BufferChunks<'a> {}
1818
1819impl<'a> BufferChunks<'a> {
1820 pub fn seek(&mut self, offset: usize) {
1821 self.range.start = offset;
1822 self.chunks.seek(self.range.start);
1823 if let Some(highlights) = self.highlights.as_mut() {
1824 highlights
1825 .stack
1826 .retain(|(end_offset, _)| *end_offset > offset);
1827 if let Some((mat, capture_ix)) = &highlights.next_capture {
1828 let capture = mat.captures[*capture_ix as usize];
1829 if offset >= capture.node.start_byte() {
1830 let next_capture_end = capture.node.end_byte();
1831 if offset < next_capture_end {
1832 highlights.stack.push((
1833 next_capture_end,
1834 highlights.highlight_map.get(capture.index),
1835 ));
1836 }
1837 highlights.next_capture.take();
1838 }
1839 }
1840 highlights.captures.set_byte_range(self.range.clone());
1841 }
1842 }
1843
1844 pub fn offset(&self) -> usize {
1845 self.range.start
1846 }
1847
1848 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
1849 let depth = match endpoint.severity {
1850 DiagnosticSeverity::ERROR => &mut self.error_depth,
1851 DiagnosticSeverity::WARNING => &mut self.warning_depth,
1852 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
1853 DiagnosticSeverity::HINT => &mut self.hint_depth,
1854 _ => return,
1855 };
1856 if endpoint.is_start {
1857 *depth += 1;
1858 } else {
1859 *depth -= 1;
1860 }
1861 }
1862
1863 fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
1864 if self.error_depth > 0 {
1865 Some(DiagnosticSeverity::ERROR)
1866 } else if self.warning_depth > 0 {
1867 Some(DiagnosticSeverity::WARNING)
1868 } else if self.information_depth > 0 {
1869 Some(DiagnosticSeverity::INFORMATION)
1870 } else if self.hint_depth > 0 {
1871 Some(DiagnosticSeverity::HINT)
1872 } else {
1873 None
1874 }
1875 }
1876}
1877
1878impl<'a> Iterator for BufferChunks<'a> {
1879 type Item = Chunk<'a>;
1880
1881 fn next(&mut self) -> Option<Self::Item> {
1882 let mut next_capture_start = usize::MAX;
1883 let mut next_diagnostic_endpoint = usize::MAX;
1884
1885 if let Some(highlights) = self.highlights.as_mut() {
1886 while let Some((parent_capture_end, _)) = highlights.stack.last() {
1887 if *parent_capture_end <= self.range.start {
1888 highlights.stack.pop();
1889 } else {
1890 break;
1891 }
1892 }
1893
1894 if highlights.next_capture.is_none() {
1895 highlights.next_capture = highlights.captures.next();
1896 }
1897
1898 while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
1899 let capture = mat.captures[*capture_ix as usize];
1900 if self.range.start < capture.node.start_byte() {
1901 next_capture_start = capture.node.start_byte();
1902 break;
1903 } else {
1904 let highlight_id = highlights.highlight_map.get(capture.index);
1905 highlights
1906 .stack
1907 .push((capture.node.end_byte(), highlight_id));
1908 highlights.next_capture = highlights.captures.next();
1909 }
1910 }
1911 }
1912
1913 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
1914 if endpoint.offset <= self.range.start {
1915 self.update_diagnostic_depths(endpoint);
1916 self.diagnostic_endpoints.next();
1917 } else {
1918 next_diagnostic_endpoint = endpoint.offset;
1919 break;
1920 }
1921 }
1922
1923 if let Some(chunk) = self.chunks.peek() {
1924 let chunk_start = self.range.start;
1925 let mut chunk_end = (self.chunks.offset() + chunk.len())
1926 .min(next_capture_start)
1927 .min(next_diagnostic_endpoint);
1928 let mut highlight_style = None;
1929 if let Some(highlights) = self.highlights.as_ref() {
1930 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
1931 chunk_end = chunk_end.min(*parent_capture_end);
1932 highlight_style = parent_highlight_id.style(highlights.theme);
1933 }
1934 }
1935
1936 let slice =
1937 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
1938 self.range.start = chunk_end;
1939 if self.range.start == self.chunks.offset() + chunk.len() {
1940 self.chunks.next().unwrap();
1941 }
1942
1943 Some(Chunk {
1944 text: slice,
1945 highlight_style,
1946 diagnostic: self.current_diagnostic_severity(),
1947 })
1948 } else {
1949 None
1950 }
1951 }
1952}
1953
1954impl QueryCursorHandle {
1955 fn new() -> Self {
1956 QueryCursorHandle(Some(
1957 QUERY_CURSORS
1958 .lock()
1959 .pop()
1960 .unwrap_or_else(|| QueryCursor::new()),
1961 ))
1962 }
1963}
1964
1965impl Deref for QueryCursorHandle {
1966 type Target = QueryCursor;
1967
1968 fn deref(&self) -> &Self::Target {
1969 self.0.as_ref().unwrap()
1970 }
1971}
1972
1973impl DerefMut for QueryCursorHandle {
1974 fn deref_mut(&mut self) -> &mut Self::Target {
1975 self.0.as_mut().unwrap()
1976 }
1977}
1978
1979impl Drop for QueryCursorHandle {
1980 fn drop(&mut self) {
1981 let mut cursor = self.0.take().unwrap();
1982 cursor.set_byte_range(0..usize::MAX);
1983 cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
1984 QUERY_CURSORS.lock().push(cursor)
1985 }
1986}
1987
1988trait ToTreeSitterPoint {
1989 fn to_ts_point(self) -> tree_sitter::Point;
1990 fn from_ts_point(point: tree_sitter::Point) -> Self;
1991}
1992
1993impl ToTreeSitterPoint for Point {
1994 fn to_ts_point(self) -> tree_sitter::Point {
1995 tree_sitter::Point::new(self.row as usize, self.column as usize)
1996 }
1997
1998 fn from_ts_point(point: tree_sitter::Point) -> Self {
1999 Point::new(point.row as u32, point.column as u32)
2000 }
2001}
2002
2003trait ToPointUtf16 {
2004 fn to_point_utf16(self) -> PointUtf16;
2005}
2006
2007impl ToPointUtf16 for lsp::Position {
2008 fn to_point_utf16(self) -> PointUtf16 {
2009 PointUtf16::new(self.line, self.character)
2010 }
2011}
2012
2013impl operation_queue::Operation for Operation {
2014 fn lamport_timestamp(&self) -> clock::Lamport {
2015 match self {
2016 Operation::Buffer(_) => {
2017 unreachable!("buffer operations should never be deferred at this layer")
2018 }
2019 Operation::UpdateDiagnostics {
2020 lamport_timestamp, ..
2021 } => *lamport_timestamp,
2022 }
2023 }
2024}
2025
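/// Returns the ranges associated with a diagnostic: the ranges of any related
/// information entries that point back into this file, followed by the diagnostic's
/// own range.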
2026fn diagnostic_ranges<'a>(
2027 diagnostic: &'a lsp::Diagnostic,
2028 abs_path: Option<&'a Path>,
2029) -> impl 'a + Iterator<Item = Range<PointUtf16>> {
2030 diagnostic
2031 .related_information
2032 .iter()
2033 .flatten()
2034 .filter_map(move |info| {
2035 if info.location.uri.to_file_path().ok()? == abs_path? {
2036 let info_start = PointUtf16::new(
2037 info.location.range.start.line,
2038 info.location.range.start.character,
2039 );
2040 let info_end = PointUtf16::new(
2041 info.location.range.end.line,
2042 info.location.range.end.character,
2043 );
2044 Some(info_start..info_end)
2045 } else {
2046 None
2047 }
2048 })
2049 .chain(Some(
2050 diagnostic.range.start.to_point_utf16()..diagnostic.range.end.to_point_utf16(),
2051 ))
2052}
2053
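/// Groups a sequence of row numbers into ranges of consecutive rows, capping each
/// range at `max_len` rows.
///
/// A minimal sketch of the expected output (doctest marked `ignore` because the
/// exact crate path is assumed):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges(vec![1, 2, 3, 5, 6, 9].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7, 9..10]);
/// ```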
2054pub fn contiguous_ranges(
2055 values: impl Iterator<Item = u32>,
2056 max_len: usize,
2057) -> impl Iterator<Item = Range<u32>> {
2058 let mut values = values.into_iter();
2059 let mut current_range: Option<Range<u32>> = None;
2060 std::iter::from_fn(move || loop {
2061 if let Some(value) = values.next() {
2062 if let Some(range) = &mut current_range {
2063 if value == range.end && range.len() < max_len {
2064 range.end += 1;
2065 continue;
2066 }
2067 }
2068
2069 let prev_range = current_range.clone();
2070 current_range = Some(value..(value + 1));
2071 if prev_range.is_some() {
2072 return prev_range;
2073 }
2074 } else {
2075 return current_range.take();
2076 }
2077 })
2078}