pub use crate::{
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
    PLAIN_TEXT,
};
use crate::{
    diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
    range_from_lsp,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
use lsp::LanguageServer;
use parking_lot::Mutex;
use postage::{prelude::Stream, sink::Sink, watch};
use similar::{ChangeTag, TextDiff};
use smol::future::yield_now;
use std::{
    any::Any,
    cell::RefCell,
    cmp::{self, Ordering},
    collections::{BTreeMap, HashMap},
    ffi::OsString,
    future::Future,
    iter::{Iterator, Peekable},
    ops::{Deref, DerefMut, Range, Sub},
    path::{Path, PathBuf},
    str,
    sync::Arc,
    time::{Duration, Instant, SystemTime, UNIX_EPOCH},
    vec,
};
use sum_tree::TreeMap;
use text::{operation_queue::OperationQueue, rope::TextDimension};
pub use text::{Buffer as TextBuffer, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
use util::{post_inc, TryFutureExt as _};

#[cfg(any(test, feature = "test-support"))]
pub use tree_sitter_rust;

pub use lsp::DiagnosticSeverity;

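// Note on the two statics below: a `tree_sitter::Parser` is stateful, so each thread keeps
// its own instance in `PARSER`, while `QueryCursor`s are recycled through the
// `QUERY_CURSORS` pool: `QueryCursorHandle::new` pops one (or creates a new one) and the
// handle's `Drop` impl resets its byte and point ranges before returning it to the pool.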
thread_local! {
    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}

lazy_static! {
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}

// TODO - Make this configurable
const INDENT_SIZE: u32 = 4;

pub struct Buffer {
    text: TextBuffer,
    file: Option<Box<dyn File>>,
    saved_version: clock::Global,
    saved_mtime: SystemTime,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_tree: Mutex<Option<SyntaxTree>>,
    parsing_in_background: bool,
    parse_count: usize,
    diagnostics: DiagnosticSet,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    selections_update_count: usize,
    diagnostics_update_count: usize,
    language_server: Option<LanguageServerState>,
    deferred_ops: OperationQueue<Operation>,
    #[cfg(test)]
    pub(crate) operations: Vec<Operation>,
}

pub struct BufferSnapshot {
    text: text::BufferSnapshot,
    tree: Option<Tree>,
    diagnostics: DiagnosticSet,
    diagnostics_update_count: usize,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    selections_update_count: usize,
    is_parsing: bool,
    language: Option<Arc<Language>>,
    parse_count: usize,
}

#[derive(Clone, Debug)]
struct SelectionSet {
    selections: Arc<[Selection<Anchor>]>,
    lamport_timestamp: clock::Lamport,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct GroupId {
    source: Arc<str>,
    id: usize,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Diagnostic {
    pub code: Option<String>,
    pub severity: DiagnosticSeverity,
    pub message: String,
    pub group_id: usize,
    pub is_valid: bool,
    pub is_primary: bool,
    pub is_disk_based: bool,
}

struct LanguageServerState {
    server: Arc<LanguageServer>,
    latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
    pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
    next_version: usize,
    _maintain_server: Task<Option<()>>,
}

#[derive(Clone)]
struct LanguageServerSnapshot {
    buffer_snapshot: text::BufferSnapshot,
    version: usize,
    path: Arc<Path>,
}

#[derive(Clone, Debug)]
pub enum Operation {
    Buffer(text::Operation),
    UpdateDiagnostics {
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
    UpdateSelections {
        replica_id: ReplicaId,
        selections: Arc<[Selection<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
}
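// Non-buffer operations carry a lamport timestamp; when one arrives before the text
// operations it depends on, it is parked in `deferred_ops` (an `OperationQueue` keyed on
// that timestamp) until its anchors can be resolved. See the `operation_queue::Operation`
// impl near the end of this file.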

#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Event {
    Edited,
    Dirtied,
    Saved,
    FileHandleChanged,
    Reloaded,
    Reparsed,
    DiagnosticsUpdated,
    Closed,
}

pub trait File {
    fn mtime(&self) -> SystemTime;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<Path>;

    /// Returns the absolute path of this file.
    fn abs_path(&self) -> Option<PathBuf>;

    /// Returns the path of this file relative to the worktree's parent directory (i.e. it
    /// includes the name of the worktree's root folder).
    fn full_path(&self) -> PathBuf;

    /// Returns the last component of this file's absolute path. If this file represents the
    /// root of its worktree, returns the name of the worktree itself.
    fn file_name(&self) -> Option<OsString>;

    fn is_deleted(&self) -> bool;

    fn save(
        &self,
        buffer_id: u64,
        text: Rope,
        version: clock::Global,
        cx: &mut MutableAppContext,
    ) -> Task<Result<(clock::Global, SystemTime)>>;

    fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;

    fn format_remote(&self, buffer_id: u64, cx: &mut MutableAppContext)
        -> Option<Task<Result<()>>>;

    fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);

    fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);

    fn as_any(&self) -> &dyn Any;
}
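// Illustrative example (hypothetical paths): for a worktree rooted at "/home/alice/zed"
// containing the file "src/main.rs":
// - `path()`      returns "src/main.rs"
// - `abs_path()`  returns Some("/home/alice/zed/src/main.rs")
// - `full_path()` returns "zed/src/main.rs"
// - `file_name()` returns Some("main.rs")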

struct QueryCursorHandle(Option<QueryCursor>);

#[derive(Clone)]
struct SyntaxTree {
    tree: Tree,
    version: clock::Global,
}

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    edited: Vec<Anchor>,
    inserted: Option<Vec<Range<Anchor>>>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    indent: bool,
}

struct TextProvider<'a>(&'a Rope);

struct BufferChunkHighlights<'a> {
    captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
    next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
    stack: Vec<(usize, HighlightId)>,
    highlight_map: HighlightMap,
    theme: &'a SyntaxTheme,
    _query_cursor: QueryCursorHandle,
}

pub struct BufferChunks<'a> {
    range: Range<usize>,
    chunks: rope::Chunks<'a>,
    diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    highlights: Option<BufferChunkHighlights<'a>>,
}

#[derive(Clone, Copy, Debug, Default)]
pub struct Chunk<'a> {
    pub text: &'a str,
    pub highlight_style: Option<HighlightStyle>,
    pub diagnostic: Option<DiagnosticSeverity>,
}

pub(crate) struct Diff {
    base_version: clock::Global,
    new_text: Arc<str>,
    changes: Vec<(ChangeTag, usize)>,
}

#[derive(Clone, Copy)]
struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    severity: DiagnosticSeverity,
}
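// `DiagnosticEndpoint`s mark where diagnostic ranges start and end. `BufferChunks` consumes
// them in offset order, maintaining a nesting depth per severity (see
// `update_diagnostic_depths`), and reports the most severe level whose depth is non-zero
// for each emitted chunk.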
258
259impl Buffer {
260 pub fn new<T: Into<Arc<str>>>(
261 replica_id: ReplicaId,
262 base_text: T,
263 cx: &mut ModelContext<Self>,
264 ) -> Self {
265 Self::build(
266 TextBuffer::new(
267 replica_id,
268 cx.model_id() as u64,
269 History::new(base_text.into()),
270 ),
271 None,
272 )
273 }
274
275 pub fn from_file<T: Into<Arc<str>>>(
276 replica_id: ReplicaId,
277 base_text: T,
278 file: Box<dyn File>,
279 cx: &mut ModelContext<Self>,
280 ) -> Self {
281 Self::build(
282 TextBuffer::new(
283 replica_id,
284 cx.model_id() as u64,
285 History::new(base_text.into()),
286 ),
287 Some(file),
288 )
289 }
290
291 pub fn from_proto(
292 replica_id: ReplicaId,
293 message: proto::Buffer,
294 file: Option<Box<dyn File>>,
295 cx: &mut ModelContext<Self>,
296 ) -> Result<Self> {
297 let fragments_len = message.fragments.len();
298 let buffer = TextBuffer::from_parts(
299 replica_id,
300 message.id,
301 &message.visible_text,
302 &message.deleted_text,
303 message
304 .undo_map
305 .into_iter()
306 .map(proto::deserialize_undo_map_entry),
307 message
308 .fragments
309 .into_iter()
310 .enumerate()
311 .map(|(i, fragment)| {
312 proto::deserialize_buffer_fragment(fragment, i, fragments_len)
313 }),
314 message.lamport_timestamp,
315 From::from(message.version),
316 );
317 let mut this = Self::build(buffer, file);
318 for selection_set in message.selections {
319 this.remote_selections.insert(
320 selection_set.replica_id as ReplicaId,
321 SelectionSet {
322 selections: proto::deserialize_selections(selection_set.selections),
323 lamport_timestamp: clock::Lamport {
324 replica_id: selection_set.replica_id as ReplicaId,
325 value: selection_set.lamport_timestamp,
326 },
327 },
328 );
329 }
330 let snapshot = this.snapshot();
331 let entries = proto::deserialize_diagnostics(message.diagnostics);
332 this.apply_diagnostic_update(
333 DiagnosticSet::from_sorted_entries(entries.into_iter().cloned(), &snapshot),
334 cx,
335 );
336
337 let deferred_ops = message
338 .deferred_operations
339 .into_iter()
340 .map(proto::deserialize_operation)
341 .collect::<Result<Vec<_>>>()?;
342 this.apply_ops(deferred_ops, cx)?;
343
344 Ok(this)
345 }
346
347 pub fn to_proto(&self) -> proto::Buffer {
348 proto::Buffer {
349 id: self.remote_id(),
350 visible_text: self.text.text(),
351 deleted_text: self.text.deleted_text(),
352 undo_map: self
353 .text
354 .undo_history()
355 .map(proto::serialize_undo_map_entry)
356 .collect(),
357 version: From::from(&self.version),
358 lamport_timestamp: self.lamport_clock.value,
359 fragments: self
360 .text
361 .fragments()
362 .map(proto::serialize_buffer_fragment)
363 .collect(),
364 selections: self
365 .remote_selections
366 .iter()
367 .map(|(replica_id, set)| proto::SelectionSet {
368 replica_id: *replica_id as u32,
369 selections: proto::serialize_selections(&set.selections),
370 lamport_timestamp: set.lamport_timestamp.value,
371 })
372 .collect(),
373 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
374 deferred_operations: self
375 .deferred_ops
376 .iter()
377 .map(proto::serialize_operation)
378 .chain(
379 self.text
380 .deferred_ops()
381 .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone()))),
382 )
383 .collect(),
384 }
385 }
386
387 pub fn with_language(
388 mut self,
389 language: Option<Arc<Language>>,
390 language_server: Option<Arc<LanguageServer>>,
391 cx: &mut ModelContext<Self>,
392 ) -> Self {
393 self.set_language(language, language_server, cx);
394 self
395 }
396
397 fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
398 let saved_mtime;
399 if let Some(file) = file.as_ref() {
400 saved_mtime = file.mtime();
401 } else {
402 saved_mtime = UNIX_EPOCH;
403 }
404
405 Self {
406 saved_mtime,
407 saved_version: buffer.version(),
408 text: buffer,
409 file,
410 syntax_tree: Mutex::new(None),
411 parsing_in_background: false,
412 parse_count: 0,
413 sync_parse_timeout: Duration::from_millis(1),
414 autoindent_requests: Default::default(),
415 pending_autoindent: Default::default(),
416 language: None,
417 remote_selections: Default::default(),
418 selections_update_count: 0,
419 diagnostics: Default::default(),
420 diagnostics_update_count: 0,
421 language_server: None,
422 deferred_ops: OperationQueue::new(),
423 #[cfg(test)]
424 operations: Default::default(),
425 }
426 }
427
428 pub fn snapshot(&self) -> BufferSnapshot {
429 BufferSnapshot {
430 text: self.text.snapshot(),
431 tree: self.syntax_tree(),
432 remote_selections: self.remote_selections.clone(),
433 diagnostics: self.diagnostics.clone(),
434 diagnostics_update_count: self.diagnostics_update_count,
435 is_parsing: self.parsing_in_background,
436 language: self.language.clone(),
437 parse_count: self.parse_count,
438 selections_update_count: self.selections_update_count,
439 }
440 }
441
442 pub fn file(&self) -> Option<&dyn File> {
443 self.file.as_deref()
444 }
445
446 pub fn format(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
447 let file = if let Some(file) = self.file.as_ref() {
448 file
449 } else {
450 return Task::ready(Err(anyhow!("buffer has no file")));
451 };
452
453 if let Some(LanguageServerState { server, .. }) = self.language_server.as_ref() {
454 let server = server.clone();
455 let abs_path = file.abs_path().unwrap();
456 let version = self.version();
457 cx.spawn(|this, mut cx| async move {
458 let edits = server
459 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
460 text_document: lsp::TextDocumentIdentifier::new(
461 lsp::Url::from_file_path(&abs_path).unwrap(),
462 ),
463 options: Default::default(),
464 work_done_progress_params: Default::default(),
465 })
466 .await?;
467
468 if let Some(edits) = edits {
469 this.update(&mut cx, |this, cx| {
470 if this.version == version {
471 for edit in &edits {
472 let range = range_from_lsp(edit.range);
473 if this.clip_point_utf16(range.start, Bias::Left) != range.start
474 || this.clip_point_utf16(range.end, Bias::Left) != range.end
475 {
476 return Err(anyhow!(
477 "invalid formatting edits received from language server"
478 ));
479 }
480 }
481
482 for edit in edits.into_iter().rev() {
483 this.edit([range_from_lsp(edit.range)], edit.new_text, cx);
484 }
485 Ok(())
486 } else {
487 Err(anyhow!("buffer edited since starting to format"))
488 }
489 })
490 } else {
491 Ok(())
492 }
493 })
494 } else {
495 let format = file.format_remote(self.remote_id(), cx.as_mut());
496 cx.spawn(|_, _| async move {
497 if let Some(format) = format {
498 format.await?;
499 }
500 Ok(())
501 })
502 }
503 }
504
505 pub fn save(
506 &mut self,
507 cx: &mut ModelContext<Self>,
508 ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
509 let file = self
510 .file
511 .as_ref()
512 .ok_or_else(|| anyhow!("buffer has no file"))?;
513 let text = self.as_rope().clone();
514 let version = self.version();
515 let save = file.save(self.remote_id(), text, version, cx.as_mut());
516 Ok(cx.spawn(|this, mut cx| async move {
517 let (version, mtime) = save.await?;
518 this.update(&mut cx, |this, cx| {
519 this.did_save(version.clone(), mtime, None, cx);
520 });
521 Ok((version, mtime))
522 }))
523 }
524
525 pub fn set_language(
526 &mut self,
527 language: Option<Arc<Language>>,
528 language_server: Option<Arc<lsp::LanguageServer>>,
529 cx: &mut ModelContext<Self>,
530 ) {
531 self.language = language;
532 self.language_server = if let Some(server) = language_server {
533 let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
534 Some(LanguageServerState {
535 latest_snapshot: latest_snapshot_tx,
536 pending_snapshots: Default::default(),
537 next_version: 0,
538 server: server.clone(),
539 _maintain_server: cx.background().spawn(
540 async move {
541 let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
542 while let Some(snapshot) = latest_snapshot_rx.recv().await {
543 if let Some(snapshot) = snapshot {
544 let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
545 if let Some(prev_snapshot) = prev_snapshot {
546 let changes = lsp::DidChangeTextDocumentParams {
547 text_document: lsp::VersionedTextDocumentIdentifier::new(
548 uri,
549 snapshot.version as i32,
550 ),
551 content_changes: snapshot
552 .buffer_snapshot
553 .edits_since::<(PointUtf16, usize)>(
554 prev_snapshot.buffer_snapshot.version(),
555 )
556 .map(|edit| {
557 let edit_start = edit.new.start.0;
558 let edit_end = edit_start
559 + (edit.old.end.0 - edit.old.start.0);
560 let new_text = snapshot
561 .buffer_snapshot
562 .text_for_range(
563 edit.new.start.1..edit.new.end.1,
564 )
565 .collect();
566 lsp::TextDocumentContentChangeEvent {
567 range: Some(lsp::Range::new(
568 lsp::Position::new(
569 edit_start.row,
570 edit_start.column,
571 ),
572 lsp::Position::new(
573 edit_end.row,
574 edit_end.column,
575 ),
576 )),
577 range_length: None,
578 text: new_text,
579 }
580 })
581 .collect(),
582 };
583 server
584 .notify::<lsp::notification::DidChangeTextDocument>(changes)
585 .await?;
586 } else {
587 server
588 .notify::<lsp::notification::DidOpenTextDocument>(
589 lsp::DidOpenTextDocumentParams {
590 text_document: lsp::TextDocumentItem::new(
591 uri,
592 Default::default(),
593 snapshot.version as i32,
594 snapshot.buffer_snapshot.text().to_string(),
595 ),
596 },
597 )
598 .await?;
599 }
600
601 prev_snapshot = Some(snapshot);
602 }
603 }
604 Ok(())
605 }
606 .log_err(),
607 ),
608 })
609 } else {
610 None
611 };
612
613 self.reparse(cx);
614 self.update_language_server();
615 }
616
617 pub fn did_save(
618 &mut self,
619 version: clock::Global,
620 mtime: SystemTime,
621 new_file: Option<Box<dyn File>>,
622 cx: &mut ModelContext<Self>,
623 ) {
624 self.saved_mtime = mtime;
625 self.saved_version = version;
626 if let Some(new_file) = new_file {
627 self.file = Some(new_file);
628 }
629 if let Some(state) = &self.language_server {
630 cx.background()
631 .spawn(
632 state
633 .server
634 .notify::<lsp::notification::DidSaveTextDocument>(
635 lsp::DidSaveTextDocumentParams {
636 text_document: lsp::TextDocumentIdentifier {
637 uri: lsp::Url::from_file_path(
638 self.file.as_ref().unwrap().abs_path().unwrap(),
639 )
640 .unwrap(),
641 },
642 text: None,
643 },
644 ),
645 )
646 .detach()
647 }
648 cx.emit(Event::Saved);
649 }
650
651 pub fn file_updated(
652 &mut self,
653 new_file: Box<dyn File>,
654 cx: &mut ModelContext<Self>,
655 ) -> Option<Task<()>> {
656 let old_file = self.file.as_ref()?;
657 let mut file_changed = false;
658 let mut task = None;
659
660 if new_file.path() != old_file.path() {
661 file_changed = true;
662 }
663
664 if new_file.is_deleted() {
665 if !old_file.is_deleted() {
666 file_changed = true;
667 if !self.is_dirty() {
668 cx.emit(Event::Dirtied);
669 }
670 }
671 } else {
672 let new_mtime = new_file.mtime();
673 if new_mtime != old_file.mtime() {
674 file_changed = true;
675
676 if !self.is_dirty() {
677 task = Some(cx.spawn(|this, mut cx| {
678 async move {
679 let new_text = this.read_with(&cx, |this, cx| {
680 this.file.as_ref().and_then(|file| file.load_local(cx))
681 });
682 if let Some(new_text) = new_text {
683 let new_text = new_text.await?;
684 let diff = this
685 .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
686 .await;
687 this.update(&mut cx, |this, cx| {
688 if this.apply_diff(diff, cx) {
689 this.saved_version = this.version();
690 this.saved_mtime = new_mtime;
691 cx.emit(Event::Reloaded);
692 }
693 });
694 }
695 Ok(())
696 }
697 .log_err()
698 .map(drop)
699 }));
700 }
701 }
702 }
703
704 if file_changed {
705 cx.emit(Event::FileHandleChanged);
706 }
707 self.file = Some(new_file);
708 task
709 }
710
711 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
712 cx.emit(Event::Closed);
713 }
714
715 pub fn language(&self) -> Option<&Arc<Language>> {
716 self.language.as_ref()
717 }
718
719 pub fn parse_count(&self) -> usize {
720 self.parse_count
721 }
722
723 pub fn selections_update_count(&self) -> usize {
724 self.selections_update_count
725 }
726
727 pub fn diagnostics_update_count(&self) -> usize {
728 self.diagnostics_update_count
729 }
730
731 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
732 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
733 self.interpolate_tree(syntax_tree);
734 Some(syntax_tree.tree.clone())
735 } else {
736 None
737 }
738 }
739
740 #[cfg(any(test, feature = "test-support"))]
741 pub fn is_parsing(&self) -> bool {
742 self.parsing_in_background
743 }
744
745 #[cfg(test)]
746 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
747 self.sync_parse_timeout = timeout;
748 }
749
750 fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
751 if self.parsing_in_background {
752 return false;
753 }
754
755 if let Some(grammar) = self.grammar().cloned() {
756 let old_tree = self.syntax_tree();
757 let text = self.as_rope().clone();
758 let parsed_version = self.version();
759 let parse_task = cx.background().spawn({
760 let grammar = grammar.clone();
761 async move { Self::parse_text(&text, old_tree, &grammar) }
762 });
763
764 match cx
765 .background()
766 .block_with_timeout(self.sync_parse_timeout, parse_task)
767 {
768 Ok(new_tree) => {
769 self.did_finish_parsing(new_tree, parsed_version, cx);
770 return true;
771 }
772 Err(parse_task) => {
773 self.parsing_in_background = true;
774 cx.spawn(move |this, mut cx| async move {
775 let new_tree = parse_task.await;
776 this.update(&mut cx, move |this, cx| {
777 let grammar_changed = this
778 .grammar()
779 .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
780 let parse_again =
781 this.version.changed_since(&parsed_version) || grammar_changed;
782 this.parsing_in_background = false;
783 this.did_finish_parsing(new_tree, parsed_version, cx);
784
785 if parse_again && this.reparse(cx) {
786 return;
787 }
788 });
789 })
790 .detach();
791 }
792 }
793 }
794 false
795 }
796
797 fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
798 PARSER.with(|parser| {
799 let mut parser = parser.borrow_mut();
800 parser
801 .set_language(grammar.ts_language)
802 .expect("incompatible grammar");
803 let mut chunks = text.chunks_in_range(0..text.len());
804 let tree = parser
805 .parse_with(
806 &mut move |offset, _| {
807 chunks.seek(offset);
808 chunks.next().unwrap_or("").as_bytes()
809 },
810 old_tree.as_ref(),
811 )
812 .unwrap();
813 tree
814 })
815 }
816
817 fn interpolate_tree(&self, tree: &mut SyntaxTree) {
818 for edit in self.edits_since::<(usize, Point)>(&tree.version) {
819 let (bytes, lines) = edit.flatten();
820 tree.tree.edit(&InputEdit {
821 start_byte: bytes.new.start,
822 old_end_byte: bytes.new.start + bytes.old.len(),
823 new_end_byte: bytes.new.end,
824 start_position: lines.new.start.to_ts_point(),
825 old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
826 .to_ts_point(),
827 new_end_position: lines.new.end.to_ts_point(),
828 });
829 }
830 tree.version = self.version();
831 }
832
833 fn did_finish_parsing(
834 &mut self,
835 tree: Tree,
836 version: clock::Global,
837 cx: &mut ModelContext<Self>,
838 ) {
839 self.parse_count += 1;
840 *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
841 self.request_autoindent(cx);
842 cx.emit(Event::Reparsed);
843 cx.notify();
844 }
845
846 pub fn update_diagnostics<T>(
847 &mut self,
848 version: Option<i32>,
849 mut diagnostics: Vec<DiagnosticEntry<T>>,
850 cx: &mut ModelContext<Self>,
851 ) -> Result<Operation>
852 where
853 T: Copy + Ord + TextDimension + Sub<Output = T> + Clip + ToPoint,
854 {
855 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
856 Ordering::Equal
857 .then_with(|| b.is_primary.cmp(&a.is_primary))
858 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
859 .then_with(|| a.severity.cmp(&b.severity))
860 .then_with(|| a.message.cmp(&b.message))
861 }
862
863 let version = version.map(|version| version as usize);
864 let content = if let Some(version) = version {
865 let language_server = self.language_server.as_mut().unwrap();
866 language_server
867 .pending_snapshots
868 .retain(|&v, _| v >= version);
869 let snapshot = language_server
870 .pending_snapshots
871 .get(&version)
872 .ok_or_else(|| anyhow!("missing snapshot"))?;
873 &snapshot.buffer_snapshot
874 } else {
875 self.deref()
876 };
877
878 diagnostics.sort_unstable_by(|a, b| {
879 Ordering::Equal
880 .then_with(|| a.range.start.cmp(&b.range.start))
881 .then_with(|| b.range.end.cmp(&a.range.end))
882 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
883 });
884
885 let mut sanitized_diagnostics = Vec::new();
886 let mut edits_since_save = content.edits_since::<T>(&self.saved_version).peekable();
887 let mut last_edit_old_end = T::default();
888 let mut last_edit_new_end = T::default();
889 'outer: for entry in diagnostics {
890 let mut start = entry.range.start;
891 let mut end = entry.range.end;
892
893 // Some diagnostics are based on files on disk instead of buffers'
894 // current contents. Adjust these diagnostics' ranges to reflect
895 // any unsaved edits.
896 if entry.diagnostic.is_disk_based {
897 while let Some(edit) = edits_since_save.peek() {
898 if edit.old.end <= start {
899 last_edit_old_end = edit.old.end;
900 last_edit_new_end = edit.new.end;
901 edits_since_save.next();
902 } else if edit.old.start <= end && edit.old.end >= start {
903 continue 'outer;
904 } else {
905 break;
906 }
907 }
908
909 let start_overshoot = start - last_edit_old_end;
910 start = last_edit_new_end;
911 start.add_assign(&start_overshoot);
912
913 let end_overshoot = end - last_edit_old_end;
914 end = last_edit_new_end;
915 end.add_assign(&end_overshoot);
916 }
917
918 let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content);
919 let mut range = range.start.to_point(content)..range.end.to_point(content);
920 // Expand empty ranges by one character
921 if range.start == range.end {
922 range.end.column += 1;
923 range.end = content.clip_point(range.end, Bias::Right);
924 if range.start == range.end && range.end.column > 0 {
925 range.start.column -= 1;
926 range.start = content.clip_point(range.start, Bias::Left);
927 }
928 }
929
930 sanitized_diagnostics.push(DiagnosticEntry {
931 range,
932 diagnostic: entry.diagnostic,
933 });
934 }
935 drop(edits_since_save);
936
937 let set = DiagnosticSet::new(sanitized_diagnostics, content);
938 self.apply_diagnostic_update(set.clone(), cx);
939 Ok(Operation::UpdateDiagnostics {
940 diagnostics: set.iter().cloned().collect(),
941 lamport_timestamp: self.text.lamport_clock.tick(),
942 })
943 }
944
945 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
946 if let Some(indent_columns) = self.compute_autoindents() {
947 let indent_columns = cx.background().spawn(indent_columns);
948 match cx
949 .background()
950 .block_with_timeout(Duration::from_micros(500), indent_columns)
951 {
952 Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
953 Err(indent_columns) => {
954 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
955 let indent_columns = indent_columns.await;
956 this.update(&mut cx, |this, cx| {
957 this.apply_autoindents(indent_columns, cx);
958 });
959 }));
960 }
961 }
962 }
963 }
964
965 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
966 let max_rows_between_yields = 100;
967 let snapshot = self.snapshot();
968 if snapshot.language.is_none()
969 || snapshot.tree.is_none()
970 || self.autoindent_requests.is_empty()
971 {
972 return None;
973 }
974
975 let autoindent_requests = self.autoindent_requests.clone();
976 Some(async move {
977 let mut indent_columns = BTreeMap::new();
978 for request in autoindent_requests {
979 let old_to_new_rows = request
980 .edited
981 .iter()
982 .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
983 .zip(
984 request
985 .edited
986 .iter()
987 .map(|anchor| anchor.summary::<Point>(&snapshot).row),
988 )
989 .collect::<BTreeMap<u32, u32>>();
990
991 let mut old_suggestions = HashMap::<u32, u32>::default();
992 let old_edited_ranges =
993 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
994 for old_edited_range in old_edited_ranges {
995 let suggestions = request
996 .before_edit
997 .suggest_autoindents(old_edited_range.clone())
998 .into_iter()
999 .flatten();
1000 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1001 let indentation_basis = old_to_new_rows
1002 .get(&suggestion.basis_row)
1003 .and_then(|from_row| old_suggestions.get(from_row).copied())
1004 .unwrap_or_else(|| {
1005 request
1006 .before_edit
1007 .indent_column_for_line(suggestion.basis_row)
1008 });
1009 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1010 old_suggestions.insert(
1011 *old_to_new_rows.get(&old_row).unwrap(),
1012 indentation_basis + delta,
1013 );
1014 }
1015 yield_now().await;
1016 }
1017
1018 // At this point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the
1019 // buffer before the edit, but keyed by the row for these lines after the edits were applied.
1020 let new_edited_row_ranges =
1021 contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
1022 for new_edited_row_range in new_edited_row_ranges {
1023 let suggestions = snapshot
1024 .suggest_autoindents(new_edited_row_range.clone())
1025 .into_iter()
1026 .flatten();
1027 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1028 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1029 let new_indentation = indent_columns
1030 .get(&suggestion.basis_row)
1031 .copied()
1032 .unwrap_or_else(|| {
1033 snapshot.indent_column_for_line(suggestion.basis_row)
1034 })
1035 + delta;
1036 if old_suggestions
1037 .get(&new_row)
1038 .map_or(true, |old_indentation| new_indentation != *old_indentation)
1039 {
1040 indent_columns.insert(new_row, new_indentation);
1041 }
1042 }
1043 yield_now().await;
1044 }
1045
1046 if let Some(inserted) = request.inserted.as_ref() {
1047 let inserted_row_ranges = contiguous_ranges(
1048 inserted
1049 .iter()
1050 .map(|range| range.to_point(&snapshot))
1051 .flat_map(|range| range.start.row..range.end.row + 1),
1052 max_rows_between_yields,
1053 );
1054 for inserted_row_range in inserted_row_ranges {
1055 let suggestions = snapshot
1056 .suggest_autoindents(inserted_row_range.clone())
1057 .into_iter()
1058 .flatten();
1059 for (row, suggestion) in inserted_row_range.zip(suggestions) {
1060 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1061 let new_indentation = indent_columns
1062 .get(&suggestion.basis_row)
1063 .copied()
1064 .unwrap_or_else(|| {
1065 snapshot.indent_column_for_line(suggestion.basis_row)
1066 })
1067 + delta;
1068 indent_columns.insert(row, new_indentation);
1069 }
1070 yield_now().await;
1071 }
1072 }
1073 }
1074 indent_columns
1075 })
1076 }
1077
1078 fn apply_autoindents(
1079 &mut self,
1080 indent_columns: BTreeMap<u32, u32>,
1081 cx: &mut ModelContext<Self>,
1082 ) {
1083 self.autoindent_requests.clear();
1084 self.start_transaction();
1085 for (row, indent_column) in &indent_columns {
1086 self.set_indent_column_for_line(*row, *indent_column, cx);
1087 }
1088 self.end_transaction(cx);
1089 }
1090
    fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
        let current_column = self.indent_column_for_line(row);
        if column > current_column {
            let offset = Point::new(row, 0).to_offset(&*self);
            self.edit(
                [offset..offset],
                " ".repeat((column - current_column) as usize),
                cx,
            );
        } else if column < current_column {
            self.edit(
                [Point::new(row, 0)..Point::new(row, current_column - column)],
                "",
                cx,
            );
        }
    }
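    // For example: raising a line's indentation from column 2 to 6 inserts four spaces at
    // column 0, while lowering it from 6 to 2 deletes the line's leading columns 0..4.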

    pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
        // TODO: it would be nice to not allocate here.
        let old_text = self.text();
        let base_version = self.version();
        cx.background().spawn(async move {
            let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
                .iter_all_changes()
                .map(|c| (c.tag(), c.value().len()))
                .collect::<Vec<_>>();
            Diff {
                base_version,
                new_text,
                changes,
            }
        })
    }

    pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
        if self.version == diff.base_version {
            self.start_transaction();
            let mut offset = 0;
            for (tag, len) in diff.changes {
                let range = offset..(offset + len);
                match tag {
                    ChangeTag::Equal => offset += len,
                    ChangeTag::Delete => self.edit(Some(range), "", cx),
                    ChangeTag::Insert => {
                        self.edit(Some(offset..offset), &diff.new_text[range], cx);
                        offset += len;
                    }
                }
            }
            self.end_transaction(cx);
            true
        } else {
            false
        }
    }
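    // Sketch of the round trip above: diffing "one\ntwo\n" against "one\n2\n" yields,
    // line by line, roughly (Equal, 4), (Delete, 4), (Insert, 2). `apply_diff` then skips
    // the first four bytes, deletes "two\n", and inserts "2\n" from `new_text`, leaving the
    // buffer equal to the new text as long as its version hasn't moved on since `diff` ran.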

    pub fn is_dirty(&self) -> bool {
        !self.saved_version.observed_all(&self.version)
            || self.file.as_ref().map_or(false, |file| file.is_deleted())
    }

    pub fn has_conflict(&self) -> bool {
        !self.saved_version.observed_all(&self.version)
            && self
                .file
                .as_ref()
                .map_or(false, |file| file.mtime() > self.saved_mtime)
    }

    pub fn subscribe(&mut self) -> Subscription {
        self.text.subscribe()
    }

    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }

    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.text.start_transaction_at(now)
    }

    pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        self.end_transaction_at(Instant::now(), cx)
    }

    pub fn end_transaction_at(
        &mut self,
        now: Instant,
        cx: &mut ModelContext<Self>,
    ) -> Option<TransactionId> {
        if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
            let was_dirty = start_version != self.saved_version;
            self.did_edit(&start_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }
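    // Typical usage (sketch, with hypothetical ranges): wrap a batch of edits in a single
    // transaction so they group into one undoable step and trigger one `did_edit`:
    //
    //     buffer.start_transaction();
    //     buffer.edit([0..0], "use std::fmt;\n", cx);
    //     buffer.edit([42..46], "replacement", cx);
    //     buffer.end_transaction(cx);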
1190
1191 pub fn set_active_selections(
1192 &mut self,
1193 selections: Arc<[Selection<Anchor>]>,
1194 cx: &mut ModelContext<Self>,
1195 ) {
1196 let lamport_timestamp = self.text.lamport_clock.tick();
1197 self.remote_selections.insert(
1198 self.text.replica_id(),
1199 SelectionSet {
1200 selections: selections.clone(),
1201 lamport_timestamp,
1202 },
1203 );
1204 self.send_operation(
1205 Operation::UpdateSelections {
1206 replica_id: self.text.replica_id(),
1207 selections,
1208 lamport_timestamp,
1209 },
1210 cx,
1211 );
1212 }
1213
1214 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1215 self.set_active_selections(Arc::from([]), cx);
1216 }
1217
1218 fn update_language_server(&mut self) {
1219 let language_server = if let Some(language_server) = self.language_server.as_mut() {
1220 language_server
1221 } else {
1222 return;
1223 };
1224 let abs_path = self
1225 .file
1226 .as_ref()
1227 .map_or(Path::new("/").to_path_buf(), |file| {
1228 file.abs_path().unwrap()
1229 });
1230
1231 let version = post_inc(&mut language_server.next_version);
1232 let snapshot = LanguageServerSnapshot {
1233 buffer_snapshot: self.text.snapshot(),
1234 version,
1235 path: Arc::from(abs_path),
1236 };
1237 language_server
1238 .pending_snapshots
1239 .insert(version, snapshot.clone());
1240 let _ = language_server
1241 .latest_snapshot
1242 .blocking_send(Some(snapshot));
1243 }
1244
1245 pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
1246 where
1247 I: IntoIterator<Item = Range<S>>,
1248 S: ToOffset,
1249 T: Into<String>,
1250 {
1251 self.edit_internal(ranges_iter, new_text, false, cx)
1252 }
1253
1254 pub fn edit_with_autoindent<I, S, T>(
1255 &mut self,
1256 ranges_iter: I,
1257 new_text: T,
1258 cx: &mut ModelContext<Self>,
1259 ) where
1260 I: IntoIterator<Item = Range<S>>,
1261 S: ToOffset,
1262 T: Into<String>,
1263 {
1264 self.edit_internal(ranges_iter, new_text, true, cx)
1265 }
1266
1267 pub fn edit_internal<I, S, T>(
1268 &mut self,
1269 ranges_iter: I,
1270 new_text: T,
1271 autoindent: bool,
1272 cx: &mut ModelContext<Self>,
1273 ) where
1274 I: IntoIterator<Item = Range<S>>,
1275 S: ToOffset,
1276 T: Into<String>,
1277 {
1278 let new_text = new_text.into();
1279
1280 // Skip invalid ranges and coalesce contiguous ones.
1281 let mut ranges: Vec<Range<usize>> = Vec::new();
1282 for range in ranges_iter {
1283 let range = range.start.to_offset(self)..range.end.to_offset(self);
1284 if !new_text.is_empty() || !range.is_empty() {
1285 if let Some(prev_range) = ranges.last_mut() {
1286 if prev_range.end >= range.start {
1287 prev_range.end = cmp::max(prev_range.end, range.end);
1288 } else {
1289 ranges.push(range);
1290 }
1291 } else {
1292 ranges.push(range);
1293 }
1294 }
1295 }
1296 if ranges.is_empty() {
1297 return;
1298 }
1299
1300 self.start_transaction();
1301 self.pending_autoindent.take();
1302 let autoindent_request = if autoindent && self.language.is_some() {
1303 let before_edit = self.snapshot();
1304 let edited = ranges
1305 .iter()
1306 .filter_map(|range| {
1307 let start = range.start.to_point(self);
1308 if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
1309 None
1310 } else {
1311 Some(self.anchor_before(range.start))
1312 }
1313 })
1314 .collect();
1315 Some((before_edit, edited))
1316 } else {
1317 None
1318 };
1319
1320 let first_newline_ix = new_text.find('\n');
1321 let new_text_len = new_text.len();
1322
1323 let edit = self.text.edit(ranges.iter().cloned(), new_text);
1324
1325 if let Some((before_edit, edited)) = autoindent_request {
1326 let mut inserted = None;
1327 if let Some(first_newline_ix) = first_newline_ix {
1328 let mut delta = 0isize;
1329 inserted = Some(
1330 ranges
1331 .iter()
1332 .map(|range| {
1333 let start =
1334 (delta + range.start as isize) as usize + first_newline_ix + 1;
1335 let end = (delta + range.start as isize) as usize + new_text_len;
1336 delta +=
1337 (range.end as isize - range.start as isize) + new_text_len as isize;
1338 self.anchor_before(start)..self.anchor_after(end)
1339 })
1340 .collect(),
1341 );
1342 }
1343
1344 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1345 before_edit,
1346 edited,
1347 inserted,
1348 }));
1349 }
1350
1351 self.end_transaction(cx);
1352 self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
1353 }
1354
1355 fn did_edit(
1356 &mut self,
1357 old_version: &clock::Global,
1358 was_dirty: bool,
1359 cx: &mut ModelContext<Self>,
1360 ) {
1361 if self.edits_since::<usize>(old_version).next().is_none() {
1362 return;
1363 }
1364
1365 self.reparse(cx);
1366 self.update_language_server();
1367
1368 cx.emit(Event::Edited);
1369 if !was_dirty {
1370 cx.emit(Event::Dirtied);
1371 }
1372 cx.notify();
1373 }
1374
1375 fn grammar(&self) -> Option<&Arc<Grammar>> {
1376 self.language.as_ref().and_then(|l| l.grammar.as_ref())
1377 }
1378
1379 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1380 &mut self,
1381 ops: I,
1382 cx: &mut ModelContext<Self>,
1383 ) -> Result<()> {
1384 self.pending_autoindent.take();
1385 let was_dirty = self.is_dirty();
1386 let old_version = self.version.clone();
1387 let mut deferred_ops = Vec::new();
1388 let buffer_ops = ops
1389 .into_iter()
1390 .filter_map(|op| match op {
1391 Operation::Buffer(op) => Some(op),
1392 _ => {
1393 if self.can_apply_op(&op) {
1394 self.apply_op(op, cx);
1395 } else {
1396 deferred_ops.push(op);
1397 }
1398 None
1399 }
1400 })
1401 .collect::<Vec<_>>();
1402 self.text.apply_ops(buffer_ops)?;
1403 self.deferred_ops.insert(deferred_ops);
1404 self.flush_deferred_ops(cx);
1405 self.did_edit(&old_version, was_dirty, cx);
1406 // Notify independently of whether the buffer was edited as the operations could include a
1407 // selection update.
1408 cx.notify();
1409 Ok(())
1410 }
1411
1412 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1413 let mut deferred_ops = Vec::new();
1414 for op in self.deferred_ops.drain().iter().cloned() {
1415 if self.can_apply_op(&op) {
1416 self.apply_op(op, cx);
1417 } else {
1418 deferred_ops.push(op);
1419 }
1420 }
1421 self.deferred_ops.insert(deferred_ops);
1422 }
1423
1424 fn can_apply_op(&self, operation: &Operation) -> bool {
1425 match operation {
1426 Operation::Buffer(_) => {
1427 unreachable!("buffer operations should never be applied at this layer")
1428 }
1429 Operation::UpdateDiagnostics {
1430 diagnostics: diagnostic_set,
1431 ..
1432 } => diagnostic_set.iter().all(|diagnostic| {
1433 self.text.can_resolve(&diagnostic.range.start)
1434 && self.text.can_resolve(&diagnostic.range.end)
1435 }),
1436 Operation::UpdateSelections { selections, .. } => selections
1437 .iter()
1438 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1439 }
1440 }
1441
1442 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1443 match operation {
1444 Operation::Buffer(_) => {
1445 unreachable!("buffer operations should never be applied at this layer")
1446 }
1447 Operation::UpdateDiagnostics {
1448 diagnostics: diagnostic_set,
1449 ..
1450 } => {
1451 let snapshot = self.snapshot();
1452 self.apply_diagnostic_update(
1453 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1454 cx,
1455 );
1456 }
1457 Operation::UpdateSelections {
1458 replica_id,
1459 selections,
1460 lamport_timestamp,
1461 } => {
1462 if let Some(set) = self.remote_selections.get(&replica_id) {
1463 if set.lamport_timestamp > lamport_timestamp {
1464 return;
1465 }
1466 }
1467
1468 self.remote_selections.insert(
1469 replica_id,
1470 SelectionSet {
1471 selections,
1472 lamport_timestamp,
1473 },
1474 );
1475 self.text.lamport_clock.observe(lamport_timestamp);
1476 self.selections_update_count += 1;
1477 }
1478 }
1479 }
1480
1481 fn apply_diagnostic_update(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext<Self>) {
1482 self.diagnostics = diagnostics;
1483 self.diagnostics_update_count += 1;
1484 cx.notify();
1485 cx.emit(Event::DiagnosticsUpdated);
1486 }
1487
1488 #[cfg(not(test))]
1489 pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1490 if let Some(file) = &self.file {
1491 file.buffer_updated(self.remote_id(), operation, cx.as_mut());
1492 }
1493 }
1494
1495 #[cfg(test)]
1496 pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
1497 self.operations.push(operation);
1498 }
1499
1500 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1501 self.remote_selections.remove(&replica_id);
1502 cx.notify();
1503 }
1504
1505 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1506 let was_dirty = self.is_dirty();
1507 let old_version = self.version.clone();
1508
1509 if let Some((transaction_id, operation)) = self.text.undo() {
1510 self.send_operation(Operation::Buffer(operation), cx);
1511 self.did_edit(&old_version, was_dirty, cx);
1512 Some(transaction_id)
1513 } else {
1514 None
1515 }
1516 }
1517
1518 pub fn undo_transaction(
1519 &mut self,
1520 transaction_id: TransactionId,
1521 cx: &mut ModelContext<Self>,
1522 ) -> bool {
1523 let was_dirty = self.is_dirty();
1524 let old_version = self.version.clone();
1525
1526 if let Some(operation) = self.text.undo_transaction(transaction_id) {
1527 self.send_operation(Operation::Buffer(operation), cx);
1528 self.did_edit(&old_version, was_dirty, cx);
1529 true
1530 } else {
1531 false
1532 }
1533 }
1534
1535 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1536 let was_dirty = self.is_dirty();
1537 let old_version = self.version.clone();
1538
1539 if let Some((transaction_id, operation)) = self.text.redo() {
1540 self.send_operation(Operation::Buffer(operation), cx);
1541 self.did_edit(&old_version, was_dirty, cx);
1542 Some(transaction_id)
1543 } else {
1544 None
1545 }
1546 }
1547
1548 pub fn redo_transaction(
1549 &mut self,
1550 transaction_id: TransactionId,
1551 cx: &mut ModelContext<Self>,
1552 ) -> bool {
1553 let was_dirty = self.is_dirty();
1554 let old_version = self.version.clone();
1555
1556 if let Some(operation) = self.text.redo_transaction(transaction_id) {
1557 self.send_operation(Operation::Buffer(operation), cx);
1558 self.did_edit(&old_version, was_dirty, cx);
1559 true
1560 } else {
1561 false
1562 }
1563 }
1564}
1565
1566#[cfg(any(test, feature = "test-support"))]
1567impl Buffer {
1568 pub fn set_group_interval(&mut self, group_interval: Duration) {
1569 self.text.set_group_interval(group_interval);
1570 }
1571
1572 pub fn randomly_edit<T>(
1573 &mut self,
1574 rng: &mut T,
1575 old_range_count: usize,
1576 cx: &mut ModelContext<Self>,
1577 ) where
1578 T: rand::Rng,
1579 {
1580 let mut old_ranges: Vec<Range<usize>> = Vec::new();
1581 for _ in 0..old_range_count {
1582 let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1);
1583 if last_end > self.len() {
1584 break;
1585 }
1586 old_ranges.push(self.text.random_byte_range(last_end, rng));
1587 }
1588 let new_text_len = rng.gen_range(0..10);
1589 let new_text: String = crate::random_char_iter::RandomCharIter::new(&mut *rng)
1590 .take(new_text_len)
1591 .collect();
1592 log::info!(
1593 "mutating buffer {} at {:?}: {:?}",
1594 self.replica_id(),
1595 old_ranges,
1596 new_text
1597 );
1598 self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx);
1599 }
1600
1601 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1602 let was_dirty = self.is_dirty();
1603 let old_version = self.version.clone();
1604
1605 let ops = self.text.randomly_undo_redo(rng);
1606 if !ops.is_empty() {
1607 for op in ops {
1608 self.send_operation(Operation::Buffer(op), cx);
1609 self.did_edit(&old_version, was_dirty, cx);
1610 }
1611 }
1612 }
1613}
1614
1615impl Entity for Buffer {
1616 type Event = Event;
1617
1618 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
1619 if let Some(file) = self.file.as_ref() {
1620 file.buffer_removed(self.remote_id(), cx);
1621 }
1622 }
1623}
1624
1625impl Deref for Buffer {
1626 type Target = TextBuffer;
1627
1628 fn deref(&self) -> &Self::Target {
1629 &self.text
1630 }
1631}
1632
1633impl BufferSnapshot {
1634 fn suggest_autoindents<'a>(
1635 &'a self,
1636 row_range: Range<u32>,
1637 ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
1638 let mut query_cursor = QueryCursorHandle::new();
1639 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1640 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1641
1642 // Get the "indentation ranges" that intersect this row range.
1643 let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
1644 let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
1645 query_cursor.set_point_range(
1646 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
1647 ..Point::new(row_range.end, 0).to_ts_point(),
1648 );
1649 let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
1650 for mat in query_cursor.matches(
1651 &grammar.indents_query,
1652 tree.root_node(),
1653 TextProvider(self.as_rope()),
1654 ) {
1655 let mut node_kind = "";
1656 let mut start: Option<Point> = None;
1657 let mut end: Option<Point> = None;
1658 for capture in mat.captures {
1659 if Some(capture.index) == indent_capture_ix {
1660 node_kind = capture.node.kind();
1661 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1662 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1663 } else if Some(capture.index) == end_capture_ix {
1664 end = Some(Point::from_ts_point(capture.node.start_position().into()));
1665 }
1666 }
1667
1668 if let Some((start, end)) = start.zip(end) {
1669 if start.row == end.row {
1670 continue;
1671 }
1672
1673 let range = start..end;
1674 match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
1675 Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
1676 Ok(ix) => {
1677 let prev_range = &mut indentation_ranges[ix];
1678 prev_range.0.end = prev_range.0.end.max(range.end);
1679 }
1680 }
1681 }
1682 }
1683
1684 let mut prev_row = prev_non_blank_row.unwrap_or(0);
1685 Some(row_range.map(move |row| {
1686 let row_start = Point::new(row, self.indent_column_for_line(row));
1687
1688 let mut indent_from_prev_row = false;
1689 let mut outdent_to_row = u32::MAX;
1690 for (range, _node_kind) in &indentation_ranges {
1691 if range.start.row >= row {
1692 break;
1693 }
1694
1695 if range.start.row == prev_row && range.end > row_start {
1696 indent_from_prev_row = true;
1697 }
1698 if range.end.row >= prev_row && range.end <= row_start {
1699 outdent_to_row = outdent_to_row.min(range.start.row);
1700 }
1701 }
1702
1703 let suggestion = if outdent_to_row == prev_row {
1704 IndentSuggestion {
1705 basis_row: prev_row,
1706 indent: false,
1707 }
1708 } else if indent_from_prev_row {
1709 IndentSuggestion {
1710 basis_row: prev_row,
1711 indent: true,
1712 }
1713 } else if outdent_to_row < prev_row {
1714 IndentSuggestion {
1715 basis_row: outdent_to_row,
1716 indent: false,
1717 }
1718 } else {
1719 IndentSuggestion {
1720 basis_row: prev_row,
1721 indent: false,
1722 }
1723 };
1724
1725 prev_row = row;
1726 suggestion
1727 }))
1728 } else {
1729 None
1730 }
1731 }
1732
1733 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
1734 while row > 0 {
1735 row -= 1;
1736 if !self.is_line_blank(row) {
1737 return Some(row);
1738 }
1739 }
1740 None
1741 }
1742
1743 pub fn chunks<'a, T: ToOffset>(
1744 &'a self,
1745 range: Range<T>,
1746 theme: Option<&'a SyntaxTheme>,
1747 ) -> BufferChunks<'a> {
1748 let range = range.start.to_offset(self)..range.end.to_offset(self);
1749
1750 let mut highlights = None;
1751 let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
1752 if let Some(theme) = theme {
1753 for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
1754 diagnostic_endpoints.push(DiagnosticEndpoint {
1755 offset: entry.range.start,
1756 is_start: true,
1757 severity: entry.diagnostic.severity,
1758 });
1759 diagnostic_endpoints.push(DiagnosticEndpoint {
1760 offset: entry.range.end,
1761 is_start: false,
1762 severity: entry.diagnostic.severity,
1763 });
1764 }
1765 diagnostic_endpoints
1766 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
1767
1768 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1769 let mut query_cursor = QueryCursorHandle::new();
1770
1771 // TODO - add a Tree-sitter API to remove the need for this.
1772 let cursor = unsafe {
1773 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
1774 };
1775 let captures = cursor.set_byte_range(range.clone()).captures(
1776 &grammar.highlights_query,
1777 tree.root_node(),
1778 TextProvider(self.text.as_rope()),
1779 );
1780 highlights = Some(BufferChunkHighlights {
1781 captures,
1782 next_capture: None,
1783 stack: Default::default(),
1784 highlight_map: grammar.highlight_map(),
1785 _query_cursor: query_cursor,
1786 theme,
1787 })
1788 }
1789 }
1790
1791 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
1792 let chunks = self.text.as_rope().chunks_in_range(range.clone());
1793
1794 BufferChunks {
1795 range,
1796 chunks,
1797 diagnostic_endpoints,
1798 error_depth: 0,
1799 warning_depth: 0,
1800 information_depth: 0,
1801 hint_depth: 0,
1802 highlights,
1803 }
1804 }
1805
1806 pub fn language(&self) -> Option<&Arc<Language>> {
1807 self.language.as_ref()
1808 }
1809
1810 fn grammar(&self) -> Option<&Arc<Grammar>> {
1811 self.language
1812 .as_ref()
1813 .and_then(|language| language.grammar.as_ref())
1814 }
1815
1816 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
1817 if let Some(tree) = self.tree.as_ref() {
1818 let root = tree.root_node();
1819 let range = range.start.to_offset(self)..range.end.to_offset(self);
1820 let mut node = root.descendant_for_byte_range(range.start, range.end);
1821 while node.map_or(false, |n| n.byte_range() == range) {
1822 node = node.unwrap().parent();
1823 }
1824 node.map(|n| n.byte_range())
1825 } else {
1826 None
1827 }
1828 }
1829
1830 pub fn enclosing_bracket_ranges<T: ToOffset>(
1831 &self,
1832 range: Range<T>,
1833 ) -> Option<(Range<usize>, Range<usize>)> {
1834 let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
1835 let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
1836 let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
1837
1838 // Find bracket pairs that *inclusively* contain the given range.
1839 let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
1840 let mut cursor = QueryCursorHandle::new();
1841 let matches = cursor.set_byte_range(range).matches(
1842 &grammar.brackets_query,
1843 tree.root_node(),
1844 TextProvider(self.as_rope()),
1845 );
1846
1847 // Get the ranges of the innermost pair of brackets.
1848 matches
1849 .filter_map(|mat| {
1850 let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
1851 let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
1852 Some((open.byte_range(), close.byte_range()))
1853 })
1854 .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
1855 }
1856
1857 pub fn remote_selections_in_range<'a>(
1858 &'a self,
1859 range: Range<Anchor>,
1860 ) -> impl 'a + Iterator<Item = (ReplicaId, impl 'a + Iterator<Item = &'a Selection<Anchor>>)>
1861 {
1862 self.remote_selections
1863 .iter()
1864 .filter(|(replica_id, set)| {
1865 **replica_id != self.text.replica_id() && !set.selections.is_empty()
1866 })
1867 .map(move |(replica_id, set)| {
1868 let start_ix = match set.selections.binary_search_by(|probe| {
1869 probe
1870 .end
1871 .cmp(&range.start, self)
1872 .unwrap()
1873 .then(Ordering::Greater)
1874 }) {
1875 Ok(ix) | Err(ix) => ix,
1876 };
1877 let end_ix = match set.selections.binary_search_by(|probe| {
1878 probe
1879 .start
1880 .cmp(&range.end, self)
1881 .unwrap()
1882 .then(Ordering::Less)
1883 }) {
1884 Ok(ix) | Err(ix) => ix,
1885 };
1886
1887 (*replica_id, set.selections[start_ix..end_ix].iter())
1888 })
1889 }
1890
1891 pub fn diagnostics_in_range<'a, T, O>(
1892 &'a self,
1893 search_range: Range<T>,
1894 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
1895 where
1896 T: 'a + Clone + ToOffset,
1897 O: 'a + FromAnchor,
1898 {
1899 self.diagnostics.range(search_range.clone(), self, true)
1900 }
1901
1902 pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
1903 let mut groups = Vec::new();
1904 self.diagnostics.groups(&mut groups, self);
1905 groups
1906 }
1907
1908 pub fn diagnostic_group<'a, O>(
1909 &'a self,
1910 group_id: usize,
1911 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
1912 where
1913 O: 'a + FromAnchor,
1914 {
1915 self.diagnostics.group(group_id, self)
1916 }
1917
1918 pub fn diagnostics_update_count(&self) -> usize {
1919 self.diagnostics_update_count
1920 }
1921
1922 pub fn parse_count(&self) -> usize {
1923 self.parse_count
1924 }
1925
1926 pub fn selections_update_count(&self) -> usize {
1927 self.selections_update_count
1928 }
1929}

impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            tree: self.tree.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            selections_update_count: self.selections_update_count,
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.is_parsing,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }
}

impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        ByteChunks(self.0.chunks_in_range(node.byte_range()))
    }
}

struct ByteChunks<'a>(rope::Chunks<'a>);

impl<'a> Iterator for ByteChunks<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(str::as_bytes)
    }
}

unsafe impl<'a> Send for BufferChunks<'a> {}

impl<'a> BufferChunks<'a> {
    pub fn seek(&mut self, offset: usize) {
        self.range.start = offset;
        self.chunks.seek(self.range.start);
        if let Some(highlights) = self.highlights.as_mut() {
            highlights
                .stack
                .retain(|(end_offset, _)| *end_offset > offset);
            if let Some((mat, capture_ix)) = &highlights.next_capture {
                let capture = mat.captures[*capture_ix as usize];
                if offset >= capture.node.start_byte() {
                    let next_capture_end = capture.node.end_byte();
                    if offset < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_map.get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            }
            highlights.captures.set_byte_range(self.range.clone());
        }
    }

    pub fn offset(&self) -> usize {
        self.range.start
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }
    }

    fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
                let capture = mat.captures[*capture_ix as usize];
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id = highlights.highlight_map.get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
            if endpoint.offset <= self.range.start {
                self.update_diagnostic_depths(endpoint);
                self.diagnostic_endpoints.next();
            } else {
                next_diagnostic_endpoint = endpoint.offset;
                break;
            }
        }

        if let Some(chunk) = self.chunks.peek() {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_style = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
                    highlight_style = parent_highlight_id.style(highlights.theme);
                }
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                highlight_style,
                diagnostic: self.current_diagnostic_severity(),
            })
        } else {
            None
        }
    }
}

impl QueryCursorHandle {
    fn new() -> Self {
        QueryCursorHandle(Some(
            QUERY_CURSORS
                .lock()
                .pop()
                .unwrap_or_else(|| QueryCursor::new()),
        ))
    }
}

impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        self.0.as_ref().unwrap()
    }
}

impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.0.as_mut().unwrap()
    }
}

impl Drop for QueryCursorHandle {
    fn drop(&mut self) {
        let mut cursor = self.0.take().unwrap();
        cursor.set_byte_range(0..usize::MAX);
        cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
        QUERY_CURSORS.lock().push(cursor)
    }
}

trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}

impl ToTreeSitterPoint for Point {
    fn to_ts_point(self) -> tree_sitter::Point {
        tree_sitter::Point::new(self.row as usize, self.column as usize)
    }

    fn from_ts_point(point: tree_sitter::Point) -> Self {
        Point::new(point.row as u32, point.column as u32)
    }
}
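
// A small sanity check (sketch) of the conversions above: both directions are plain
// row/column casts, so a round trip through tree-sitter coordinates preserves the point.
#[cfg(test)]
mod to_tree_sitter_point_tests {
    use super::*;

    #[test]
    fn test_point_round_trip() {
        let point = Point::new(3, 14);
        assert_eq!(point.to_ts_point(), tree_sitter::Point::new(3, 14));
        assert_eq!(Point::from_ts_point(point.to_ts_point()), point);
    }
}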

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            code: Default::default(),
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            group_id: Default::default(),
            is_primary: Default::default(),
            is_valid: true,
            is_disk_based: false,
        }
    }
}
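
// Sketch of how call sites can lean on `Default` and override only the fields they care
// about; the message and severity below are hypothetical.
#[cfg(test)]
mod diagnostic_default_tests {
    use super::*;

    #[test]
    fn test_default_diagnostic() {
        let diagnostic = Diagnostic {
            message: "unused variable: `x`".to_string(),
            severity: DiagnosticSeverity::WARNING,
            is_primary: true,
            ..Default::default()
        };
        assert!(diagnostic.is_valid);
        assert!(!diagnostic.is_disk_based);
        assert_eq!(diagnostic.group_id, 0);
    }
}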

pub fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values.into_iter();
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}
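
// A quick check (sketch) of `contiguous_ranges`: consecutive values are coalesced into
// half-open ranges, and no returned range grows beyond `max_len` rows.
#[cfg(test)]
mod contiguous_ranges_tests {
    use super::contiguous_ranges;

    #[test]
    fn test_contiguous_ranges() {
        assert_eq!(
            contiguous_ranges([1, 2, 3, 5, 6, 9, 10, 11, 12].iter().copied(), 100)
                .collect::<Vec<_>>(),
            vec![1..4, 5..7, 9..13]
        );

        // With `max_len` == 3, longer runs are split into chunks of at most three rows.
        assert_eq!(
            contiguous_ranges([100, 101, 102, 103, 104, 105].iter().copied(), 3)
                .collect::<Vec<_>>(),
            vec![100..103, 103..106]
        );
    }
}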