use crate::diagnostic_set::{DiagnosticEntry, DiagnosticGroup};
pub use crate::{
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
    PLAIN_TEXT,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
use lsp::LanguageServer;
use parking_lot::Mutex;
use postage::{prelude::Stream, sink::Sink, watch};
use similar::{ChangeTag, TextDiff};
use smol::future::yield_now;
use std::{
    any::Any,
    cell::RefCell,
    cmp::{self, Reverse},
    collections::{BTreeMap, HashMap, HashSet},
    ffi::OsString,
    future::Future,
    iter::{self, Iterator, Peekable},
    ops::{Deref, DerefMut, Range},
    path::{Path, PathBuf},
    str,
    sync::Arc,
    time::{Duration, Instant, SystemTime, UNIX_EPOCH},
    vec,
};
use sum_tree::TreeMap;
use text::operation_queue::OperationQueue;
pub use text::{Buffer as TextBuffer, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
use util::{post_inc, TryFutureExt as _};

#[cfg(any(test, feature = "test-support"))]
pub use tree_sitter_rust;

pub use lsp::DiagnosticSeverity;

thread_local! {
    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}

lazy_static! {
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}
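// Tree-sitter `QueryCursor`s are comparatively expensive to construct, so used
// cursors are returned to this pool (see `QueryCursorHandle`'s `Drop` impl near
// the bottom of this file) and reused by later highlight, bracket, and indent
// queries.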

// TODO - Make this configurable
const INDENT_SIZE: u32 = 4;

pub struct Buffer {
    text: TextBuffer,
    file: Option<Box<dyn File>>,
    saved_version: clock::Global,
    saved_mtime: SystemTime,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_tree: Mutex<Option<SyntaxTree>>,
    parsing_in_background: bool,
    parse_count: usize,
    remote_selections: TreeMap<ReplicaId, Arc<[Selection<Anchor>]>>,
    diagnostics: DiagnosticSet,
    diagnostics_update_count: usize,
    language_server: Option<LanguageServerState>,
    deferred_ops: OperationQueue<Operation>,
    #[cfg(test)]
    pub(crate) operations: Vec<Operation>,
}

pub struct BufferSnapshot {
    text: text::BufferSnapshot,
    tree: Option<Tree>,
    diagnostics: DiagnosticSet,
    remote_selections: TreeMap<ReplicaId, Arc<[Selection<Anchor>]>>,
    diagnostics_update_count: usize,
    is_parsing: bool,
    language: Option<Arc<Language>>,
    parse_count: usize,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Diagnostic {
    pub source: Option<String>,
    pub code: Option<String>,
    pub severity: DiagnosticSeverity,
    pub message: String,
    pub group_id: usize,
    pub is_valid: bool,
    pub is_primary: bool,
}

struct LanguageServerState {
    server: Arc<LanguageServer>,
    latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
    pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
    next_version: usize,
    _maintain_server: Task<Option<()>>,
}

#[derive(Clone)]
struct LanguageServerSnapshot {
    buffer_snapshot: text::BufferSnapshot,
    version: usize,
    path: Arc<Path>,
}

#[derive(Clone, Debug)]
pub enum Operation {
    Buffer(text::Operation),
    UpdateDiagnostics {
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
    UpdateSelections {
        replica_id: ReplicaId,
        selections: Arc<[Selection<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
    RemoveSelections {
        replica_id: ReplicaId,
        lamport_timestamp: clock::Lamport,
    },
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Event {
    Edited,
    Dirtied,
    Saved,
    FileHandleChanged,
    Reloaded,
    Reparsed,
    DiagnosticsUpdated,
    Closed,
}

pub trait File {
    fn worktree_id(&self) -> usize;

    fn entry_id(&self) -> Option<usize>;

    fn mtime(&self) -> SystemTime;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<Path>;

    /// Returns the absolute path of this file.
    fn abs_path(&self) -> Option<PathBuf>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self) -> PathBuf;

    /// Returns the last component of this handle's absolute path. If this handle refers to the
    /// root of its worktree, then this method will return the name of the worktree itself.
    fn file_name(&self) -> Option<OsString>;

    fn is_deleted(&self) -> bool;

    fn save(
        &self,
        buffer_id: u64,
        text: Rope,
        version: clock::Global,
        cx: &mut MutableAppContext,
    ) -> Task<Result<(clock::Global, SystemTime)>>;

    fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;

    fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);

    fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);

    fn boxed_clone(&self) -> Box<dyn File>;

    fn as_any(&self) -> &dyn Any;
}

struct QueryCursorHandle(Option<QueryCursor>);

#[derive(Clone)]
struct SyntaxTree {
    tree: Tree,
    version: clock::Global,
}

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    edited: Vec<Anchor>,
    inserted: Option<Vec<Range<Anchor>>>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    indent: bool,
}

struct TextProvider<'a>(&'a Rope);

struct BufferChunkHighlights<'a> {
    captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
    next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
    stack: Vec<(usize, HighlightId)>,
    highlight_map: HighlightMap,
    theme: &'a SyntaxTheme,
    _query_cursor: QueryCursorHandle,
}

pub struct BufferChunks<'a> {
    range: Range<usize>,
    chunks: rope::Chunks<'a>,
    diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    highlights: Option<BufferChunkHighlights<'a>>,
}

#[derive(Clone, Copy, Debug, Default)]
pub struct Chunk<'a> {
    pub text: &'a str,
    pub highlight_style: Option<HighlightStyle>,
    pub diagnostic: Option<DiagnosticSeverity>,
}

pub(crate) struct Diff {
    base_version: clock::Global,
    new_text: Arc<str>,
    changes: Vec<(ChangeTag, usize)>,
}

#[derive(Clone, Copy)]
struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    severity: DiagnosticSeverity,
}

impl Buffer {
    pub fn new<T: Into<Arc<str>>>(
        replica_id: ReplicaId,
        base_text: T,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new(
                replica_id,
                cx.model_id() as u64,
                History::new(base_text.into()),
            ),
            None,
        )
    }

    pub fn from_file<T: Into<Arc<str>>>(
        replica_id: ReplicaId,
        base_text: T,
        file: Box<dyn File>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new(
                replica_id,
                cx.model_id() as u64,
                History::new(base_text.into()),
            ),
            Some(file),
        )
    }

    pub fn from_proto(
        replica_id: ReplicaId,
        message: proto::Buffer,
        file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Self> {
        let mut buffer =
            text::Buffer::new(replica_id, message.id, History::new(message.content.into()));
        let ops = message
            .history
            .into_iter()
            .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op)));
        buffer.apply_ops(ops)?;
        let mut this = Self::build(buffer, file);
        for selection_set in message.selections {
            this.remote_selections.insert(
                selection_set.replica_id as ReplicaId,
                proto::deserialize_selections(selection_set.selections),
            );
        }
        this.apply_diagnostic_update(
            Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
            cx,
        );

        Ok(this)
    }

    pub fn to_proto(&self) -> proto::Buffer {
        proto::Buffer {
            id: self.remote_id(),
            content: self.text.base_text().to_string(),
            history: self
                .text
                .history()
                .map(proto::serialize_edit_operation)
                .collect(),
            selections: self
                .remote_selections
                .iter()
                .map(|(replica_id, selections)| proto::SelectionSet {
                    replica_id: *replica_id as u32,
                    selections: proto::serialize_selections(selections),
                })
                .collect(),
            diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
        }
    }

    pub fn with_language(
        mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        self.set_language(language, language_server, cx);
        self
    }

    fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
        let saved_mtime;
        if let Some(file) = file.as_ref() {
            saved_mtime = file.mtime();
        } else {
            saved_mtime = UNIX_EPOCH;
        }

        Self {
            saved_mtime,
            saved_version: buffer.version(),
            text: buffer,
            file,
            syntax_tree: Mutex::new(None),
            parsing_in_background: false,
            parse_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            autoindent_requests: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_update_count: 0,
            language_server: None,
            deferred_ops: OperationQueue::new(),
            #[cfg(test)]
            operations: Default::default(),
        }
    }

    pub fn snapshot(&self) -> BufferSnapshot {
        BufferSnapshot {
            text: self.text.snapshot(),
            tree: self.syntax_tree(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.parsing_in_background,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }

    pub fn file(&self) -> Option<&dyn File> {
        self.file.as_deref()
    }

    pub fn save(
        &mut self,
        cx: &mut ModelContext<Self>,
    ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
        let file = self
            .file
            .as_ref()
            .ok_or_else(|| anyhow!("buffer has no file"))?;
        let text = self.as_rope().clone();
        let version = self.version();
        let save = file.save(self.remote_id(), text, version, cx.as_mut());
        Ok(cx.spawn(|this, mut cx| async move {
            let (version, mtime) = save.await?;
            this.update(&mut cx, |this, cx| {
                this.did_save(version.clone(), mtime, None, cx);
            });
            Ok((version, mtime))
        }))
    }

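    /// Assigns a language (and, optionally, a running language server) to the
    /// buffer. When a server is provided, a background task is spawned that
    /// forwards the buffer's contents to it: the first snapshot is sent as a
    /// `textDocument/didOpen` notification, and each subsequent snapshot is
    /// sent as an incremental `textDocument/didChange`. The buffer is also
    /// reparsed with the new language's grammar.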
    pub fn set_language(
        &mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<lsp::LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.language = language;
        self.language_server = if let Some(server) = language_server {
            let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
            Some(LanguageServerState {
                latest_snapshot: latest_snapshot_tx,
                pending_snapshots: Default::default(),
                next_version: 0,
                server: server.clone(),
                _maintain_server: cx.background().spawn(
                    async move {
                        let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
                        while let Some(snapshot) = latest_snapshot_rx.recv().await {
                            if let Some(snapshot) = snapshot {
                                let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
                                if let Some(prev_snapshot) = prev_snapshot {
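                                    // The edits below are reported against the new snapshot, so
                                    // each LSP change range starts at `edit.new.start` and spans
                                    // the *old* range's extent, while the replacement text is
                                    // read back from the new snapshot. For example (illustrative
                                    // only), replacing "world" with "there" in "hello\nworld"
                                    // produces one change covering (1, 0)..(1, 5) with text
                                    // "there".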
                                    let changes = lsp::DidChangeTextDocumentParams {
                                        text_document: lsp::VersionedTextDocumentIdentifier::new(
                                            uri,
                                            snapshot.version as i32,
                                        ),
                                        content_changes: snapshot
                                            .buffer_snapshot
                                            .edits_since::<(PointUtf16, usize)>(
                                                prev_snapshot.buffer_snapshot.version(),
                                            )
                                            .map(|edit| {
                                                let edit_start = edit.new.start.0;
                                                let edit_end = edit_start
                                                    + (edit.old.end.0 - edit.old.start.0);
                                                let new_text = snapshot
                                                    .buffer_snapshot
                                                    .text_for_range(
                                                        edit.new.start.1..edit.new.end.1,
                                                    )
                                                    .collect();
                                                lsp::TextDocumentContentChangeEvent {
                                                    range: Some(lsp::Range::new(
                                                        lsp::Position::new(
                                                            edit_start.row,
                                                            edit_start.column,
                                                        ),
                                                        lsp::Position::new(
                                                            edit_end.row,
                                                            edit_end.column,
                                                        ),
                                                    )),
                                                    range_length: None,
                                                    text: new_text,
                                                }
                                            })
                                            .collect(),
                                    };
                                    server
                                        .notify::<lsp::notification::DidChangeTextDocument>(changes)
                                        .await?;
                                } else {
                                    server
                                        .notify::<lsp::notification::DidOpenTextDocument>(
                                            lsp::DidOpenTextDocumentParams {
                                                text_document: lsp::TextDocumentItem::new(
                                                    uri,
                                                    Default::default(),
                                                    snapshot.version as i32,
                                                    snapshot.buffer_snapshot.text().to_string(),
                                                ),
                                            },
                                        )
                                        .await?;
                                }

                                prev_snapshot = Some(snapshot);
                            }
                        }
                        Ok(())
                    }
                    .log_err(),
                ),
            })
        } else {
            None
        };

        self.reparse(cx);
        self.update_language_server();
    }

    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: SystemTime,
        new_file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.saved_mtime = mtime;
        self.saved_version = version;
        if let Some(new_file) = new_file {
            self.file = Some(new_file);
        }
        if let Some(state) = &self.language_server {
            cx.background()
                .spawn(
                    state
                        .server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: lsp::TextDocumentIdentifier {
                                    uri: lsp::Url::from_file_path(
                                        self.file.as_ref().unwrap().abs_path().unwrap(),
                                    )
                                    .unwrap(),
                                },
                                text: None,
                            },
                        ),
                )
                .detach()
        }
        cx.emit(Event::Saved);
    }

    pub fn file_updated(
        &mut self,
        new_file: Box<dyn File>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<()>> {
        let old_file = self.file.as_ref()?;
        let mut file_changed = false;
        let mut task = None;

        if new_file.path() != old_file.path() {
            file_changed = true;
        }

        if new_file.is_deleted() {
            if !old_file.is_deleted() {
                file_changed = true;
                if !self.is_dirty() {
                    cx.emit(Event::Dirtied);
                }
            }
        } else {
            let new_mtime = new_file.mtime();
            if new_mtime != old_file.mtime() {
                file_changed = true;

                if !self.is_dirty() {
                    task = Some(cx.spawn(|this, mut cx| {
                        async move {
                            let new_text = this.read_with(&cx, |this, cx| {
                                this.file.as_ref().and_then(|file| file.load_local(cx))
                            });
                            if let Some(new_text) = new_text {
                                let new_text = new_text.await?;
                                let diff = this
                                    .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
                                    .await;
                                this.update(&mut cx, |this, cx| {
                                    if this.apply_diff(diff, cx) {
                                        this.saved_version = this.version();
                                        this.saved_mtime = new_mtime;
                                        cx.emit(Event::Reloaded);
                                    }
                                });
                            }
                            Ok(())
                        }
                        .log_err()
                        .map(drop)
                    }));
                }
            }
        }

        if file_changed {
            cx.emit(Event::FileHandleChanged);
        }
        self.file = Some(new_file);
        task
    }

    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }

    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    pub fn parse_count(&self) -> usize {
        self.parse_count
    }

    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    pub(crate) fn syntax_tree(&self) -> Option<Tree> {
        if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
            self.interpolate_tree(syntax_tree);
            Some(syntax_tree.tree.clone())
        } else {
            None
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.parsing_in_background
    }

    #[cfg(test)]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }

    fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
        if self.parsing_in_background {
            return false;
        }

        if let Some(grammar) = self.grammar().cloned() {
            let old_tree = self.syntax_tree();
            let text = self.as_rope().clone();
            let parsed_version = self.version();
            let parse_task = cx.background().spawn({
                let grammar = grammar.clone();
                async move { Self::parse_text(&text, old_tree, &grammar) }
            });

            match cx
                .background()
                .block_with_timeout(self.sync_parse_timeout, parse_task)
            {
                Ok(new_tree) => {
                    self.did_finish_parsing(new_tree, parsed_version, cx);
                    return true;
                }
                Err(parse_task) => {
                    self.parsing_in_background = true;
                    cx.spawn(move |this, mut cx| async move {
                        let new_tree = parse_task.await;
                        this.update(&mut cx, move |this, cx| {
                            let grammar_changed = this
                                .grammar()
                                .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
                            let parse_again = this.version.gt(&parsed_version) || grammar_changed;
                            this.parsing_in_background = false;
                            this.did_finish_parsing(new_tree, parsed_version, cx);

                            if parse_again && this.reparse(cx) {
                                return;
                            }
                        });
                    })
                    .detach();
                }
            }
        }
        false
    }

    fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
        PARSER.with(|parser| {
            let mut parser = parser.borrow_mut();
            parser
                .set_language(grammar.ts_language)
                .expect("incompatible grammar");
            let mut chunks = text.chunks_in_range(0..text.len());
            let tree = parser
                .parse_with(
                    &mut move |offset, _| {
                        chunks.seek(offset);
                        chunks.next().unwrap_or("").as_bytes()
                    },
                    old_tree.as_ref(),
                )
                .unwrap();
            tree
        })
    }

    fn interpolate_tree(&self, tree: &mut SyntaxTree) {
        for edit in self.edits_since::<(usize, Point)>(&tree.version) {
            let (bytes, lines) = edit.flatten();
            tree.tree.edit(&InputEdit {
                start_byte: bytes.new.start,
                old_end_byte: bytes.new.start + bytes.old.len(),
                new_end_byte: bytes.new.end,
                start_position: lines.new.start.to_ts_point(),
                old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
                    .to_ts_point(),
                new_end_position: lines.new.end.to_ts_point(),
            });
        }
        tree.version = self.version();
    }

    fn did_finish_parsing(
        &mut self,
        tree: Tree,
        version: clock::Global,
        cx: &mut ModelContext<Self>,
    ) {
        self.parse_count += 1;
        *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
        self.request_autoindent(cx);
        cx.emit(Event::Reparsed);
        cx.notify();
    }

    pub fn update_diagnostics(
        &mut self,
        version: Option<i32>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Operation> {
        diagnostics.sort_unstable_by_key(|d| (d.range.start, Reverse(d.range.end)));

        let version = version.map(|version| version as usize);
        let content = if let Some(version) = version {
            let language_server = self.language_server.as_mut().unwrap();
            language_server
                .pending_snapshots
                .retain(|&v, _| v >= version);
            let snapshot = language_server
                .pending_snapshots
                .get(&version)
                .ok_or_else(|| anyhow!("missing snapshot"))?;
            &snapshot.buffer_snapshot
        } else {
            self.deref()
        };

        let empty_set = HashSet::new();
        let disk_based_sources = self
            .language
            .as_ref()
            .and_then(|language| language.disk_based_diagnostic_sources())
            .unwrap_or(&empty_set);

        let mut edits_since_save = content
            .edits_since::<PointUtf16>(&self.saved_version)
            .peekable();
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        let mut has_disk_based_diagnostics = false;
        let mut ix = 0;
        'outer: while ix < diagnostics.len() {
            let entry = &mut diagnostics[ix];
            let mut start = entry.range.start;
            let mut end = entry.range.end;

            // Some diagnostics are based on files on disk instead of buffers'
            // current contents. Adjust these diagnostics' ranges to reflect
            // any unsaved edits.
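            // For example (illustrative only): if the saved file contained
            // "let x=1" and the unsaved buffer inserted a line above it, a
            // disk-based diagnostic reported on row 0 is shifted down to row 1
            // before being anchored, and diagnostics that overlap an edited
            // region are dropped below.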
            if entry
                .diagnostic
                .source
                .as_ref()
                .map_or(false, |source| disk_based_sources.contains(source))
            {
                has_disk_based_diagnostics = true;
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        diagnostics.remove(ix);
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                start = last_edit_new_end + (start - last_edit_old_end);
                end = last_edit_new_end + (end - last_edit_old_end);
            }

            entry.range = content.clip_point_utf16(start, Bias::Left)
                ..content.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if entry.range.start == entry.range.end {
                entry.range.end.column += 1;
                entry.range.end = content.clip_point_utf16(entry.range.end, Bias::Right);
                if entry.range.start == entry.range.end && entry.range.end.column > 0 {
                    entry.range.start.column -= 1;
                    entry.range.start = content.clip_point_utf16(entry.range.start, Bias::Left);
                }
            }
            ix += 1;
        }

        drop(edits_since_save);

        let diagnostics = diagnostics.into_iter().map(|entry| DiagnosticEntry {
            range: content.anchor_before(entry.range.start)..content.anchor_after(entry.range.end),
            diagnostic: entry.diagnostic,
        });

        // Some diagnostic sources are reported on a less frequent basis than others.
        // If those sources are absent from this message, then preserve the previous
        // diagnostics for those sources, but mark them as stale, and set a time to
        // clear them out.
        let mut merged_old_disk_based_diagnostics = false;
        self.diagnostics = if has_disk_based_diagnostics {
            DiagnosticSet::from_sorted_entries(diagnostics, content)
        } else {
            let mut new_diagnostics = diagnostics.peekable();
            let mut old_diagnostics = self
                .diagnostics
                .iter()
                .filter_map(|entry| {
                    let is_disk_based = entry
                        .diagnostic
                        .source
                        .as_ref()
                        .map_or(false, |source| disk_based_sources.contains(source));
                    if is_disk_based {
                        merged_old_disk_based_diagnostics = true;
                        let mut entry = entry.clone();
                        entry.diagnostic.is_valid = false;
                        Some(entry)
                    } else {
                        None
                    }
                })
                .peekable();
            let merged_diagnostics =
                iter::from_fn(|| match (old_diagnostics.peek(), new_diagnostics.peek()) {
                    (None, None) => None,
                    (Some(_), None) => old_diagnostics.next(),
                    (None, Some(_)) => new_diagnostics.next(),
                    (Some(old), Some(new)) => {
                        let ordering = old
                            .range
                            .start
                            .cmp(&new.range.start, content)
                            .unwrap()
                            .then_with(|| new.range.end.cmp(&old.range.end, content).unwrap());
                        if ordering.is_lt() {
                            old_diagnostics.next()
                        } else {
                            new_diagnostics.next()
                        }
                    }
                });
            DiagnosticSet::from_sorted_entries(merged_diagnostics, content)
        };

        self.diagnostics_update_count += 1;
        cx.notify();
        cx.emit(Event::DiagnosticsUpdated);
        Ok(Operation::UpdateDiagnostics {
            diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
            lamport_timestamp: self.text.lamport_clock.tick(),
        })
    }

    fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(indent_columns) = self.compute_autoindents() {
            let indent_columns = cx.background().spawn(indent_columns);
            match cx
                .background()
                .block_with_timeout(Duration::from_micros(500), indent_columns)
            {
                Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
                Err(indent_columns) => {
                    self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
                        let indent_columns = indent_columns.await;
                        this.update(&mut cx, |this, cx| {
                            this.apply_autoindents(indent_columns, cx);
                        });
                    }));
                }
            }
        }
    }

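    /// Computes new indentation levels for the rows affected by the pending
    /// autoindent requests. Each request is handled in three passes:
    /// suggestions are computed for the edited rows against the pre-edit
    /// snapshot, then against the current snapshot (keeping only rows whose
    /// suggestion changed), and finally for any newly inserted rows. The
    /// returned future yields a map from row to target indent column.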
    fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.language.is_none()
            || snapshot.tree.is_none()
            || self.autoindent_requests.is_empty()
        {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_columns = BTreeMap::new();
            for request in autoindent_requests {
                let old_to_new_rows = request
                    .edited
                    .iter()
                    .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
                    .zip(
                        request
                            .edited
                            .iter()
                            .map(|anchor| anchor.summary::<Point>(&snapshot).row),
                    )
                    .collect::<BTreeMap<u32, u32>>();

                let mut old_suggestions = HashMap::<u32, u32>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        let indentation_basis = old_to_new_rows
                            .get(&suggestion.basis_row)
                            .and_then(|from_row| old_suggestions.get(from_row).copied())
                            .unwrap_or_else(|| {
                                request
                                    .before_edit
                                    .indent_column_for_line(suggestion.basis_row)
                            });
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        old_suggestions.insert(
                            *old_to_new_rows.get(&old_row).unwrap(),
                            indentation_basis + delta,
                        );
                    }
                    yield_now().await;
                }

                // At this point, old_suggestions contains the suggested indentation for all
                // edited lines with respect to the state of the buffer before the edit, but
                // keyed by the row for these lines after the edits were applied.
                let new_edited_row_ranges =
                    contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
                for new_edited_row_range in new_edited_row_ranges {
                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        let new_indentation = indent_columns
                            .get(&suggestion.basis_row)
                            .copied()
                            .unwrap_or_else(|| {
                                snapshot.indent_column_for_line(suggestion.basis_row)
                            })
                            + delta;
                        if old_suggestions
                            .get(&new_row)
                            .map_or(true, |old_indentation| new_indentation != *old_indentation)
                        {
                            indent_columns.insert(new_row, new_indentation);
                        }
                    }
                    yield_now().await;
                }

                if let Some(inserted) = request.inserted.as_ref() {
                    let inserted_row_ranges = contiguous_ranges(
                        inserted
                            .iter()
                            .map(|range| range.to_point(&snapshot))
                            .flat_map(|range| range.start.row..range.end.row + 1),
                        max_rows_between_yields,
                    );
                    for inserted_row_range in inserted_row_ranges {
                        let suggestions = snapshot
                            .suggest_autoindents(inserted_row_range.clone())
                            .into_iter()
                            .flatten();
                        for (row, suggestion) in inserted_row_range.zip(suggestions) {
                            let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                            let new_indentation = indent_columns
                                .get(&suggestion.basis_row)
                                .copied()
                                .unwrap_or_else(|| {
                                    snapshot.indent_column_for_line(suggestion.basis_row)
                                })
                                + delta;
                            indent_columns.insert(row, new_indentation);
                        }
                        yield_now().await;
                    }
                }
            }
            indent_columns
        })
    }

    fn apply_autoindents(
        &mut self,
        indent_columns: BTreeMap<u32, u32>,
        cx: &mut ModelContext<Self>,
    ) {
        self.start_transaction();
        for (row, indent_column) in &indent_columns {
            self.set_indent_column_for_line(*row, *indent_column, cx);
        }
        self.end_transaction(cx);
    }

    fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
        let current_column = self.indent_column_for_line(row);
        if column > current_column {
            let offset = Point::new(row, 0).to_offset(&*self);
            self.edit(
                [offset..offset],
                " ".repeat((column - current_column) as usize),
                cx,
            );
        } else if column < current_column {
            self.edit(
                [Point::new(row, 0)..Point::new(row, current_column - column)],
                "",
                cx,
            );
        }
    }

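    /// Computes a line-based diff between the buffer's current text and
    /// `new_text` on a background thread. The resulting [`Diff`] records the
    /// buffer version it was computed against plus a run-length encoding of
    /// the changes; for example (illustrative only), reloading "ab\ncd\n" as
    /// "ab\nxy\n" yields the runs `[(Equal, 3), (Delete, 3), (Insert, 3)]`,
    /// which `apply_diff` below replays as edits if the version still matches.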
    pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
        // TODO: it would be nice to not allocate here.
        let old_text = self.text();
        let base_version = self.version();
        cx.background().spawn(async move {
            let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
                .iter_all_changes()
                .map(|c| (c.tag(), c.value().len()))
                .collect::<Vec<_>>();
            Diff {
                base_version,
                new_text,
                changes,
            }
        })
    }

    pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
        if self.version == diff.base_version {
            self.start_transaction();
            let mut offset = 0;
            for (tag, len) in diff.changes {
                let range = offset..(offset + len);
                match tag {
                    ChangeTag::Equal => offset += len,
                    ChangeTag::Delete => self.edit(Some(range), "", cx),
                    ChangeTag::Insert => {
                        self.edit(Some(offset..offset), &diff.new_text[range], cx);
                        offset += len;
                    }
                }
            }
            self.end_transaction(cx);
            true
        } else {
            false
        }
    }

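    /// Whether the buffer has changes that have not been written to disk: its
    /// version is ahead of the last saved version, or its file was deleted out
    /// from under it.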
    pub fn is_dirty(&self) -> bool {
        !self.saved_version.ge(&self.version)
            || self.file.as_ref().map_or(false, |file| file.is_deleted())
    }

    pub fn has_conflict(&self) -> bool {
        !self.saved_version.ge(&self.version)
            && self
                .file
                .as_ref()
                .map_or(false, |file| file.mtime() > self.saved_mtime)
    }

    pub fn subscribe(&mut self) -> Subscription {
        self.text.subscribe()
    }

    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }

    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.text.start_transaction_at(now)
    }

    pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        self.end_transaction_at(Instant::now(), cx)
    }

    pub fn end_transaction_at(
        &mut self,
        now: Instant,
        cx: &mut ModelContext<Self>,
    ) -> Option<TransactionId> {
        if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
            let was_dirty = start_version != self.saved_version;
            self.did_edit(&start_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }

    pub fn set_active_selections(
        &mut self,
        selections: Arc<[Selection<Anchor>]>,
        cx: &mut ModelContext<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        self.remote_selections
            .insert(self.text.replica_id(), selections.clone());
        self.send_operation(
            Operation::UpdateSelections {
                replica_id: self.text.replica_id(),
                selections,
                lamport_timestamp,
            },
            cx,
        );
    }

    pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        self.send_operation(
            Operation::RemoveSelections {
                replica_id: self.text.replica_id(),
                lamport_timestamp,
            },
            cx,
        );
    }

    fn update_language_server(&mut self) {
        let language_server = if let Some(language_server) = self.language_server.as_mut() {
            language_server
        } else {
            return;
        };
        let abs_path = self
            .file
            .as_ref()
            .map_or(Path::new("/").to_path_buf(), |file| {
                file.abs_path().unwrap()
            });

        let version = post_inc(&mut language_server.next_version);
        let snapshot = LanguageServerSnapshot {
            buffer_snapshot: self.text.snapshot(),
            version,
            path: Arc::from(abs_path),
        };
        language_server
            .pending_snapshots
            .insert(version, snapshot.clone());
        let _ = language_server
            .latest_snapshot
            .blocking_send(Some(snapshot));
    }

    pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
    where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, false, cx)
    }

    pub fn edit_with_autoindent<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, true, cx)
    }

    pub fn edit_internal<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        autoindent: bool,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        let new_text = new_text.into();

        // Skip invalid ranges and coalesce contiguous ones.
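        // For example (illustrative only), the ranges `[1..3, 2..5, 7..7]` with
        // non-empty replacement text become `[1..5, 7..7]`: overlapping or
        // touching ranges are merged, and empty ranges are kept only when there
        // is text to insert.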
        let mut ranges: Vec<Range<usize>> = Vec::new();
        for range in ranges_iter {
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            if !new_text.is_empty() || !range.is_empty() {
                if let Some(prev_range) = ranges.last_mut() {
                    if prev_range.end >= range.start {
                        prev_range.end = cmp::max(prev_range.end, range.end);
                    } else {
                        ranges.push(range);
                    }
                } else {
                    ranges.push(range);
                }
            }
        }
        if ranges.is_empty() {
            return;
        }

        self.start_transaction();
        self.pending_autoindent.take();
        let autoindent_request = if autoindent && self.language.is_some() {
            let before_edit = self.snapshot();
            let edited = ranges
                .iter()
                .filter_map(|range| {
                    let start = range.start.to_point(self);
                    if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
                        None
                    } else {
                        Some(self.anchor_before(range.start))
                    }
                })
                .collect();
            Some((before_edit, edited))
        } else {
            None
        };

        let first_newline_ix = new_text.find('\n');
        let new_text_len = new_text.len();

        let edit = self.text.edit(ranges.iter().cloned(), new_text);

        if let Some((before_edit, edited)) = autoindent_request {
            let mut inserted = None;
            if let Some(first_newline_ix) = first_newline_ix {
                let mut delta = 0isize;
                inserted = Some(
                    ranges
                        .iter()
                        .map(|range| {
                            let start =
                                (delta + range.start as isize) as usize + first_newline_ix + 1;
                            let end = (delta + range.start as isize) as usize + new_text_len;
                            delta +=
                                (range.end as isize - range.start as isize) + new_text_len as isize;
                            self.anchor_before(start)..self.anchor_after(end)
                        })
                        .collect(),
                );
            }

            self.autoindent_requests.push(Arc::new(AutoindentRequest {
                before_edit,
                edited,
                inserted,
            }));
        }

        self.end_transaction(cx);
        self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
    }

    fn did_edit(
        &mut self,
        old_version: &clock::Global,
        was_dirty: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if self.edits_since::<usize>(old_version).next().is_none() {
            return;
        }

        self.reparse(cx);
        self.update_language_server();

        cx.emit(Event::Edited);
        if !was_dirty {
            cx.emit(Event::Dirtied);
        }
        cx.notify();
    }

    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language.as_ref().and_then(|l| l.grammar.as_ref())
    }

    pub fn apply_ops<I: IntoIterator<Item = Operation>>(
        &mut self,
        ops: I,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.pending_autoindent.take();
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        let mut deferred_ops = Vec::new();
        let buffer_ops = ops
            .into_iter()
            .filter_map(|op| match op {
                Operation::Buffer(op) => Some(op),
                _ => {
                    if self.can_apply_op(&op) {
                        self.apply_op(op, cx);
                    } else {
                        deferred_ops.push(op);
                    }
                    None
                }
            })
            .collect::<Vec<_>>();
        self.text.apply_ops(buffer_ops)?;
        self.flush_deferred_ops(cx);
        self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited as the operations could
        // include a selection update.
        cx.notify();
        Ok(())
    }

    fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
        let mut deferred_ops = Vec::new();
        for op in self.deferred_ops.drain().iter().cloned() {
            if self.can_apply_op(&op) {
                self.apply_op(op, cx);
            } else {
                deferred_ops.push(op);
            }
        }
        self.deferred_ops.insert(deferred_ops);
    }

    fn can_apply_op(&self, operation: &Operation) -> bool {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics { diagnostics, .. } => {
                diagnostics.iter().all(|diagnostic| {
                    self.text.can_resolve(&diagnostic.range.start)
                        && self.text.can_resolve(&diagnostic.range.end)
                })
            }
            Operation::UpdateSelections { selections, .. } => selections
                .iter()
                .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
            Operation::RemoveSelections { .. } => true,
        }
    }

    fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics { diagnostics, .. } => {
                self.apply_diagnostic_update(diagnostics, cx);
            }
            Operation::UpdateSelections {
                replica_id,
                selections,
                lamport_timestamp,
            } => {
                self.remote_selections.insert(replica_id, selections);
                self.text.lamport_clock.observe(lamport_timestamp);
            }
            Operation::RemoveSelections {
                replica_id,
                lamport_timestamp,
            } => {
                self.remote_selections.remove(&replica_id);
                self.text.lamport_clock.observe(lamport_timestamp);
            }
        }
    }

    fn apply_diagnostic_update(
        &mut self,
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        cx: &mut ModelContext<Self>,
    ) {
        self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
        self.diagnostics_update_count += 1;
        cx.notify();
    }

    #[cfg(not(test))]
    pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
        if let Some(file) = &self.file {
            file.buffer_updated(self.remote_id(), operation, cx.as_mut());
        }
    }

    #[cfg(test)]
    pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
        self.operations.push(operation);
    }

    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
        self.remote_selections.remove(&replica_id);
        cx.notify();
    }

    pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some((transaction_id, operation)) = self.text.undo() {
            self.send_operation(Operation::Buffer(operation), cx);
            self.did_edit(&old_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }

    pub fn undo_transaction(
        &mut self,
        transaction_id: TransactionId,
        cx: &mut ModelContext<Self>,
    ) -> bool {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some(operation) = self.text.undo_transaction(transaction_id) {
            self.send_operation(Operation::Buffer(operation), cx);
            self.did_edit(&old_version, was_dirty, cx);
            true
        } else {
            false
        }
    }

    pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some((transaction_id, operation)) = self.text.redo() {
            self.send_operation(Operation::Buffer(operation), cx);
            self.did_edit(&old_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }

    pub fn redo_transaction(
        &mut self,
        transaction_id: TransactionId,
        cx: &mut ModelContext<Self>,
    ) -> bool {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some(operation) = self.text.redo_transaction(transaction_id) {
            self.send_operation(Operation::Buffer(operation), cx);
            self.did_edit(&old_version, was_dirty, cx);
            true
        } else {
            false
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        old_range_count: usize,
        cx: &mut ModelContext<Self>,
    ) where
        T: rand::Rng,
    {
        self.start_transaction();
        self.text.randomly_edit(rng, old_range_count);
        self.end_transaction(cx);
    }
}

impl Entity for Buffer {
    type Event = Event;

    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
        if let Some(file) = self.file.as_ref() {
            file.buffer_removed(self.remote_id(), cx);
        }
    }
}

impl Deref for Buffer {
    type Target = TextBuffer;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

impl BufferSnapshot {
    fn suggest_autoindents<'a>(
        &'a self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
        let mut query_cursor = QueryCursorHandle::new();
        if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
            let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

            // Get the "indentation ranges" that intersect this row range.
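            // An "indent" capture marks a node whose interior lines should be indented
            // relative to its first line, and an optional "end" capture marks where that
            // region stops (e.g. a closing brace). A language's indents query might look
            // roughly like `(_ "{" "}" @end) @indent` (illustrative only).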
            let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
            let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
            query_cursor.set_point_range(
                Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
                    ..Point::new(row_range.end, 0).to_ts_point(),
            );
            let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
            for mat in query_cursor.matches(
                &grammar.indents_query,
                tree.root_node(),
                TextProvider(self.as_rope()),
            ) {
                let mut node_kind = "";
                let mut start: Option<Point> = None;
                let mut end: Option<Point> = None;
                for capture in mat.captures {
                    if Some(capture.index) == indent_capture_ix {
                        node_kind = capture.node.kind();
                        start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                        end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                    } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position()));
                    }
                }

                if let Some((start, end)) = start.zip(end) {
                    if start.row == end.row {
                        continue;
                    }

                    let range = start..end;
                    match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
                        Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
                        Ok(ix) => {
                            let prev_range = &mut indentation_ranges[ix];
                            prev_range.0.end = prev_range.0.end.max(range.end);
                        }
                    }
                }
            }

            let mut prev_row = prev_non_blank_row.unwrap_or(0);
            Some(row_range.map(move |row| {
                let row_start = Point::new(row, self.indent_column_for_line(row));

                let mut indent_from_prev_row = false;
                let mut outdent_to_row = u32::MAX;
                for (range, _node_kind) in &indentation_ranges {
                    if range.start.row >= row {
                        break;
                    }

                    if range.start.row == prev_row && range.end > row_start {
                        indent_from_prev_row = true;
                    }
                    if range.end.row >= prev_row && range.end <= row_start {
                        outdent_to_row = outdent_to_row.min(range.start.row);
                    }
                }

                let suggestion = if outdent_to_row == prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                } else if indent_from_prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: true,
                    }
                } else if outdent_to_row < prev_row {
                    IndentSuggestion {
                        basis_row: outdent_to_row,
                        indent: false,
                    }
                } else {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                };

                prev_row = row;
                suggestion
            }))
        } else {
            None
        }
    }

    fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
        while row > 0 {
            row -= 1;
            if !self.is_line_blank(row) {
                return Some(row);
            }
        }
        None
    }

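    /// Returns an iterator over the text in `range`, split into [`Chunk`]s at
    /// syntax-highlighting and diagnostic boundaries. When a `theme` is
    /// provided, each chunk carries the resolved highlight style for its
    /// innermost Tree-sitter capture and the severity of any diagnostic that
    /// covers it.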
    pub fn chunks<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
        theme: Option<&'a SyntaxTheme>,
    ) -> BufferChunks<'a> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);

        let mut highlights = None;
        let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
        if let Some(theme) = theme {
            for entry in self
                .diagnostics
                .range::<_, usize>(range.clone(), self, true)
            {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                });
            }
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));

            if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
                let mut query_cursor = QueryCursorHandle::new();

                // TODO - add a Tree-sitter API to remove the need for this.
                let cursor = unsafe {
                    std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
                };
                let captures = cursor.set_byte_range(range.clone()).captures(
                    &grammar.highlights_query,
                    tree.root_node(),
                    TextProvider(self.text.as_rope()),
                );
                highlights = Some(BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_map: grammar.highlight_map(),
                    _query_cursor: query_cursor,
                    theme,
                })
            }
        }

        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
        let chunks = self.text.as_rope().chunks_in_range(range.clone());

        BufferChunks {
            range,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            highlights,
        }
    }

    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language
            .as_ref()
            .and_then(|language| language.grammar.as_ref())
    }

    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
        if let Some(tree) = self.tree.as_ref() {
            let root = tree.root_node();
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            let mut node = root.descendant_for_byte_range(range.start, range.end);
            while node.map_or(false, |n| n.byte_range() == range) {
                node = node.unwrap().parent();
            }
            node.map(|n| n.byte_range())
        } else {
            None
        }
    }

    pub fn enclosing_bracket_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<usize>, Range<usize>)> {
        let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;

        // Find bracket pairs that *inclusively* contain the given range.
        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
        let mut cursor = QueryCursorHandle::new();
        let matches = cursor.set_byte_range(range).matches(
            &grammar.brackets_query,
            tree.root_node(),
            TextProvider(self.as_rope()),
        );

        // Get the ranges of the innermost pair of brackets.
        matches
            .filter_map(|mat| {
                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
                Some((open.byte_range(), close.byte_range()))
            })
            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
    }

    pub fn remote_selections_in_range<'a>(
        &'a self,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = (ReplicaId, impl 'a + Iterator<Item = &'a Selection<Anchor>>)>
    {
        self.remote_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != self.text.replica_id())
            .map(move |(replica_id, selections)| {
                let start_ix = match selections
                    .binary_search_by(|probe| probe.end.cmp(&range.start, self).unwrap())
                {
                    Ok(ix) | Err(ix) => ix,
                };
                let end_ix = match selections
                    .binary_search_by(|probe| probe.start.cmp(&range.end, self).unwrap())
                {
                    Ok(ix) | Err(ix) => ix,
                };

                (*replica_id, selections[start_ix..end_ix].iter())
            })
    }

    pub fn all_diagnostics<'a, O>(&'a self) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics
            .iter()
            .map(|diagnostic| diagnostic.resolve(self))
    }

    pub fn diagnostics_in_range<'a, T, O>(
        &'a self,
        search_range: Range<T>,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        T: 'a + ToOffset,
        O: 'a + FromAnchor,
    {
        self.diagnostics.range(search_range, self, true)
    }

    pub fn diagnostic_groups<O>(&self) -> Vec<DiagnosticGroup<O>>
    where
        O: FromAnchor + Ord + Copy,
    {
        self.diagnostics.groups(self)
    }

    pub fn diagnostic_group<'a, O>(
        &'a self,
        group_id: usize,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics.group(group_id, self)
    }

    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    pub fn parse_count(&self) -> usize {
        self.parse_count
    }
}

impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            tree: self.tree.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.is_parsing,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }
}

impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        ByteChunks(self.0.chunks_in_range(node.byte_range()))
    }
}

struct ByteChunks<'a>(rope::Chunks<'a>);

impl<'a> Iterator for ByteChunks<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(str::as_bytes)
    }
}

unsafe impl<'a> Send for BufferChunks<'a> {}

impl<'a> BufferChunks<'a> {
    pub fn seek(&mut self, offset: usize) {
        self.range.start = offset;
        self.chunks.seek(self.range.start);
        if let Some(highlights) = self.highlights.as_mut() {
            highlights
                .stack
                .retain(|(end_offset, _)| *end_offset > offset);
            if let Some((mat, capture_ix)) = &highlights.next_capture {
                let capture = mat.captures[*capture_ix as usize];
                if offset >= capture.node.start_byte() {
                    let next_capture_end = capture.node.end_byte();
                    if offset < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_map.get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            }
            highlights.captures.set_byte_range(self.range.clone());
        }
    }

    pub fn offset(&self) -> usize {
        self.range.start
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }
    }

    fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }
}

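// Yields successive `Chunk`s of text. Each chunk ends wherever the underlying
// rope chunk, the innermost highlight capture, or the nearest diagnostic
// endpoint ends first, so a single chunk never straddles a change in highlight
// style or diagnostic severity.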
impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
                let capture = mat.captures[*capture_ix as usize];
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id = highlights.highlight_map.get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
            if endpoint.offset <= self.range.start {
                self.update_diagnostic_depths(endpoint);
                self.diagnostic_endpoints.next();
            } else {
                next_diagnostic_endpoint = endpoint.offset;
                break;
            }
        }

        if let Some(chunk) = self.chunks.peek() {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_style = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
                    highlight_style = parent_highlight_id.style(highlights.theme);
                }
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                highlight_style,
                diagnostic: self.current_diagnostic_severity(),
            })
        } else {
            None
        }
    }
}

impl QueryCursorHandle {
    fn new() -> Self {
        QueryCursorHandle(Some(
            QUERY_CURSORS
                .lock()
                .pop()
                .unwrap_or_else(|| QueryCursor::new()),
        ))
    }
}

impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        self.0.as_ref().unwrap()
    }
}

impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.0.as_mut().unwrap()
    }
}

impl Drop for QueryCursorHandle {
    fn drop(&mut self) {
        let mut cursor = self.0.take().unwrap();
        cursor.set_byte_range(0..usize::MAX);
        cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
        QUERY_CURSORS.lock().push(cursor)
    }
}

trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}

impl ToTreeSitterPoint for Point {
    fn to_ts_point(self) -> tree_sitter::Point {
        tree_sitter::Point::new(self.row as usize, self.column as usize)
    }

    fn from_ts_point(point: tree_sitter::Point) -> Self {
        Point::new(point.row as u32, point.column as u32)
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::RemoveSelections {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            code: Default::default(),
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            group_id: Default::default(),
            is_primary: Default::default(),
            is_valid: true,
        }
    }
}

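/// Groups an ascending sequence of row numbers into contiguous ranges, capping
/// each range at `max_len` rows so that long stretches can be processed with
/// periodic yields. For example (illustrative only),
/// `contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 2)` yields `1..3`, `3..4`,
/// and `5..7`.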
pub fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values.into_iter();
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}