1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
5 PLAIN_TEXT,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 outline::OutlineItem,
10 range_from_lsp, Outline,
11};
12use anyhow::{anyhow, Result};
13use clock::ReplicaId;
14use futures::FutureExt as _;
15use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
16use lazy_static::lazy_static;
17use lsp::LanguageServer;
18use parking_lot::Mutex;
19use postage::{prelude::Stream, sink::Sink, watch};
20use similar::{ChangeTag, TextDiff};
21use smol::future::yield_now;
22use std::{
23 any::Any,
24 cell::RefCell,
25 cmp::{self, Ordering},
26 collections::{BTreeMap, HashMap},
27 ffi::OsString,
28 future::Future,
29 iter::{Iterator, Peekable},
30 ops::{Deref, DerefMut, Range, Sub},
31 path::{Path, PathBuf},
32 str,
33 sync::Arc,
34 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
35 vec,
36};
37use sum_tree::TreeMap;
38use text::{operation_queue::OperationQueue, rope::TextDimension};
39pub use text::{Buffer as TextBuffer, Operation as _, *};
40use theme::SyntaxTheme;
41use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
42use util::{post_inc, TryFutureExt as _};
43
44#[cfg(any(test, feature = "test-support"))]
45pub use tree_sitter_rust;
46
47pub use lsp::DiagnosticSeverity;
48
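// Reuse a single Tree-sitter parser per thread instead of constructing a new one for every parse.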
49thread_local! {
50 static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
51}
52
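// Shared pool of reusable Tree-sitter query cursors.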
53lazy_static! {
54 static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
55}
56
57// TODO - Make this configurable
58const INDENT_SIZE: u32 = 4;
59
60pub struct Buffer {
61 text: TextBuffer,
62 file: Option<Box<dyn File>>,
63 saved_version: clock::Global,
64 saved_mtime: SystemTime,
65 language: Option<Arc<Language>>,
66 autoindent_requests: Vec<Arc<AutoindentRequest>>,
67 pending_autoindent: Option<Task<()>>,
68 sync_parse_timeout: Duration,
69 syntax_tree: Mutex<Option<SyntaxTree>>,
70 parsing_in_background: bool,
71 parse_count: usize,
72 diagnostics: DiagnosticSet,
73 remote_selections: TreeMap<ReplicaId, SelectionSet>,
74 selections_update_count: usize,
75 diagnostics_update_count: usize,
76 language_server: Option<LanguageServerState>,
77 deferred_ops: OperationQueue<Operation>,
78 #[cfg(test)]
79 pub(crate) operations: Vec<Operation>,
80}
81
82pub struct BufferSnapshot {
83 text: text::BufferSnapshot,
84 tree: Option<Tree>,
85 diagnostics: DiagnosticSet,
86 diagnostics_update_count: usize,
87 remote_selections: TreeMap<ReplicaId, SelectionSet>,
88 selections_update_count: usize,
89 is_parsing: bool,
90 language: Option<Arc<Language>>,
91 parse_count: usize,
92}
93
94#[derive(Clone, Debug)]
95struct SelectionSet {
96 selections: Arc<[Selection<Anchor>]>,
97 lamport_timestamp: clock::Lamport,
98}
99
100#[derive(Clone, Debug, PartialEq, Eq)]
101pub struct GroupId {
102 source: Arc<str>,
103 id: usize,
104}
105
106#[derive(Clone, Debug, PartialEq, Eq)]
107pub struct Diagnostic {
108 pub code: Option<String>,
109 pub severity: DiagnosticSeverity,
110 pub message: String,
111 pub group_id: usize,
112 pub is_valid: bool,
113 pub is_primary: bool,
114 pub is_disk_based: bool,
115}
116
117struct LanguageServerState {
118 server: Arc<LanguageServer>,
119 latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
120 pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
121 next_version: usize,
122 _maintain_server: Task<Option<()>>,
123}
124
125#[derive(Clone)]
126struct LanguageServerSnapshot {
127 buffer_snapshot: text::BufferSnapshot,
128 version: usize,
129 path: Arc<Path>,
130}
131
132#[derive(Clone, Debug)]
133pub enum Operation {
134 Buffer(text::Operation),
135 UpdateDiagnostics {
136 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
137 lamport_timestamp: clock::Lamport,
138 },
139 UpdateSelections {
140 replica_id: ReplicaId,
141 selections: Arc<[Selection<Anchor>]>,
142 lamport_timestamp: clock::Lamport,
143 },
144}
145
146#[derive(Clone, Debug, Eq, PartialEq)]
147pub enum Event {
148 Edited,
149 Dirtied,
150 Saved,
151 FileHandleChanged,
152 Reloaded,
153 Reparsed,
154 DiagnosticsUpdated,
155 Closed,
156}
157
158pub trait File {
159 fn mtime(&self) -> SystemTime;
160
161 /// Returns the path of this file relative to the worktree's root directory.
162 fn path(&self) -> &Arc<Path>;
163
164 /// Returns the absolute path of this file.
165 fn abs_path(&self) -> Option<PathBuf>;
166
167 /// Returns the path of this file relative to the worktree's parent directory (this means it
168 /// includes the name of the worktree's root folder).
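    /// For example, a file at `src/lib.rs` inside a worktree whose root folder is named
    /// `my-project` has a full path of `my-project/src/lib.rs`.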
169 fn full_path(&self) -> PathBuf;
170
    /// Returns the last component of this file's absolute path. If this file is the root of its
    /// worktree, then this method returns the name of the worktree itself.
173 fn file_name(&self) -> Option<OsString>;
174
175 fn is_deleted(&self) -> bool;
176
177 fn save(
178 &self,
179 buffer_id: u64,
180 text: Rope,
181 version: clock::Global,
182 cx: &mut MutableAppContext,
183 ) -> Task<Result<(clock::Global, SystemTime)>>;
184
185 fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;
186
187 fn format_remote(&self, buffer_id: u64, cx: &mut MutableAppContext)
188 -> Option<Task<Result<()>>>;
189
190 fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);
191
192 fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);
193
194 fn as_any(&self) -> &dyn Any;
195}
196
197pub(crate) struct QueryCursorHandle(Option<QueryCursor>);
198
199#[derive(Clone)]
200struct SyntaxTree {
201 tree: Tree,
202 version: clock::Global,
203}
204
205#[derive(Clone)]
206struct AutoindentRequest {
207 before_edit: BufferSnapshot,
208 edited: Vec<Anchor>,
209 inserted: Option<Vec<Range<Anchor>>>,
210}
211
212#[derive(Debug)]
213struct IndentSuggestion {
214 basis_row: u32,
215 indent: bool,
216}
217
218struct TextProvider<'a>(&'a Rope);
219
220struct BufferChunkHighlights<'a> {
221 captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
222 next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
223 stack: Vec<(usize, HighlightId)>,
224 highlight_map: HighlightMap,
225 theme: &'a SyntaxTheme,
226 _query_cursor: QueryCursorHandle,
227}
228
229pub struct BufferChunks<'a> {
230 range: Range<usize>,
231 chunks: rope::Chunks<'a>,
232 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
233 error_depth: usize,
234 warning_depth: usize,
235 information_depth: usize,
236 hint_depth: usize,
237 highlights: Option<BufferChunkHighlights<'a>>,
238}
239
240#[derive(Clone, Copy, Debug, Default)]
241pub struct Chunk<'a> {
242 pub text: &'a str,
243 pub highlight_style: Option<HighlightStyle>,
244 pub diagnostic: Option<DiagnosticSeverity>,
245}
246
247pub(crate) struct Diff {
248 base_version: clock::Global,
249 new_text: Arc<str>,
250 changes: Vec<(ChangeTag, usize)>,
251}
252
253#[derive(Clone, Copy)]
254struct DiagnosticEndpoint {
255 offset: usize,
256 is_start: bool,
257 severity: DiagnosticSeverity,
258}
259
260impl Buffer {
261 pub fn new<T: Into<Arc<str>>>(
262 replica_id: ReplicaId,
263 base_text: T,
264 cx: &mut ModelContext<Self>,
265 ) -> Self {
266 Self::build(
267 TextBuffer::new(
268 replica_id,
269 cx.model_id() as u64,
270 History::new(base_text.into()),
271 ),
272 None,
273 )
274 }
275
276 pub fn from_file<T: Into<Arc<str>>>(
277 replica_id: ReplicaId,
278 base_text: T,
279 file: Box<dyn File>,
280 cx: &mut ModelContext<Self>,
281 ) -> Self {
282 Self::build(
283 TextBuffer::new(
284 replica_id,
285 cx.model_id() as u64,
286 History::new(base_text.into()),
287 ),
288 Some(file),
289 )
290 }
291
292 pub fn from_proto(
293 replica_id: ReplicaId,
294 message: proto::Buffer,
295 file: Option<Box<dyn File>>,
296 cx: &mut ModelContext<Self>,
297 ) -> Result<Self> {
298 let fragments_len = message.fragments.len();
299 let buffer = TextBuffer::from_parts(
300 replica_id,
301 message.id,
302 &message.visible_text,
303 &message.deleted_text,
304 message
305 .undo_map
306 .into_iter()
307 .map(proto::deserialize_undo_map_entry),
308 message
309 .fragments
310 .into_iter()
311 .enumerate()
312 .map(|(i, fragment)| {
313 proto::deserialize_buffer_fragment(fragment, i, fragments_len)
314 }),
315 message.lamport_timestamp,
316 From::from(message.version),
317 );
318 let mut this = Self::build(buffer, file);
319 for selection_set in message.selections {
320 this.remote_selections.insert(
321 selection_set.replica_id as ReplicaId,
322 SelectionSet {
323 selections: proto::deserialize_selections(selection_set.selections),
324 lamport_timestamp: clock::Lamport {
325 replica_id: selection_set.replica_id as ReplicaId,
326 value: selection_set.lamport_timestamp,
327 },
328 },
329 );
330 }
331 let snapshot = this.snapshot();
332 let entries = proto::deserialize_diagnostics(message.diagnostics);
333 this.apply_diagnostic_update(
334 DiagnosticSet::from_sorted_entries(entries.into_iter().cloned(), &snapshot),
335 cx,
336 );
337
338 let deferred_ops = message
339 .deferred_operations
340 .into_iter()
341 .map(proto::deserialize_operation)
342 .collect::<Result<Vec<_>>>()?;
343 this.apply_ops(deferred_ops, cx)?;
344
345 Ok(this)
346 }
347
348 pub fn to_proto(&self) -> proto::Buffer {
349 proto::Buffer {
350 id: self.remote_id(),
351 visible_text: self.text.text(),
352 deleted_text: self.text.deleted_text(),
353 undo_map: self
354 .text
355 .undo_history()
356 .map(proto::serialize_undo_map_entry)
357 .collect(),
358 version: From::from(&self.version),
359 lamport_timestamp: self.lamport_clock.value,
360 fragments: self
361 .text
362 .fragments()
363 .map(proto::serialize_buffer_fragment)
364 .collect(),
365 selections: self
366 .remote_selections
367 .iter()
368 .map(|(replica_id, set)| proto::SelectionSet {
369 replica_id: *replica_id as u32,
370 selections: proto::serialize_selections(&set.selections),
371 lamport_timestamp: set.lamport_timestamp.value,
372 })
373 .collect(),
374 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
375 deferred_operations: self
376 .deferred_ops
377 .iter()
378 .map(proto::serialize_operation)
379 .chain(
380 self.text
381 .deferred_ops()
382 .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone()))),
383 )
384 .collect(),
385 }
386 }
387
388 pub fn with_language(
389 mut self,
390 language: Option<Arc<Language>>,
391 language_server: Option<Arc<LanguageServer>>,
392 cx: &mut ModelContext<Self>,
393 ) -> Self {
394 self.set_language(language, language_server, cx);
395 self
396 }
397
    fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
        let saved_mtime = file.as_ref().map_or(UNIX_EPOCH, |file| file.mtime());
405
406 Self {
407 saved_mtime,
408 saved_version: buffer.version(),
409 text: buffer,
410 file,
411 syntax_tree: Mutex::new(None),
412 parsing_in_background: false,
413 parse_count: 0,
414 sync_parse_timeout: Duration::from_millis(1),
415 autoindent_requests: Default::default(),
416 pending_autoindent: Default::default(),
417 language: None,
418 remote_selections: Default::default(),
419 selections_update_count: 0,
420 diagnostics: Default::default(),
421 diagnostics_update_count: 0,
422 language_server: None,
423 deferred_ops: OperationQueue::new(),
424 #[cfg(test)]
425 operations: Default::default(),
426 }
427 }
428
429 pub fn snapshot(&self) -> BufferSnapshot {
430 BufferSnapshot {
431 text: self.text.snapshot(),
432 tree: self.syntax_tree(),
433 remote_selections: self.remote_selections.clone(),
434 diagnostics: self.diagnostics.clone(),
435 diagnostics_update_count: self.diagnostics_update_count,
436 is_parsing: self.parsing_in_background,
437 language: self.language.clone(),
438 parse_count: self.parse_count,
439 selections_update_count: self.selections_update_count,
440 }
441 }
442
443 pub fn file(&self) -> Option<&dyn File> {
444 self.file.as_deref()
445 }
446
447 pub fn format(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
448 let file = if let Some(file) = self.file.as_ref() {
449 file
450 } else {
451 return Task::ready(Err(anyhow!("buffer has no file")));
452 };
453
454 if let Some(LanguageServerState { server, .. }) = self.language_server.as_ref() {
455 let server = server.clone();
456 let abs_path = file.abs_path().unwrap();
457 let version = self.version();
458 cx.spawn(|this, mut cx| async move {
459 let edits = server
460 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
461 text_document: lsp::TextDocumentIdentifier::new(
462 lsp::Url::from_file_path(&abs_path).unwrap(),
463 ),
464 options: Default::default(),
465 work_done_progress_params: Default::default(),
466 })
467 .await?;
468
469 if let Some(edits) = edits {
470 this.update(&mut cx, |this, cx| {
471 if this.version == version {
472 for edit in &edits {
473 let range = range_from_lsp(edit.range);
474 if this.clip_point_utf16(range.start, Bias::Left) != range.start
475 || this.clip_point_utf16(range.end, Bias::Left) != range.end
476 {
477 return Err(anyhow!(
478 "invalid formatting edits received from language server"
479 ));
480 }
481 }
482
483 for edit in edits.into_iter().rev() {
484 this.edit([range_from_lsp(edit.range)], edit.new_text, cx);
485 }
486 Ok(())
487 } else {
488 Err(anyhow!("buffer edited since starting to format"))
489 }
490 })
491 } else {
492 Ok(())
493 }
494 })
495 } else {
496 let format = file.format_remote(self.remote_id(), cx.as_mut());
497 cx.spawn(|_, _| async move {
498 if let Some(format) = format {
499 format.await?;
500 }
501 Ok(())
502 })
503 }
504 }
505
506 pub fn save(
507 &mut self,
508 cx: &mut ModelContext<Self>,
509 ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
510 let file = self
511 .file
512 .as_ref()
513 .ok_or_else(|| anyhow!("buffer has no file"))?;
514 let text = self.as_rope().clone();
515 let version = self.version();
516 let save = file.save(self.remote_id(), text, version, cx.as_mut());
517 Ok(cx.spawn(|this, mut cx| async move {
518 let (version, mtime) = save.await?;
519 this.update(&mut cx, |this, cx| {
520 this.did_save(version.clone(), mtime, None, cx);
521 });
522 Ok((version, mtime))
523 }))
524 }
525
526 pub fn set_language(
527 &mut self,
528 language: Option<Arc<Language>>,
529 language_server: Option<Arc<lsp::LanguageServer>>,
530 cx: &mut ModelContext<Self>,
531 ) {
532 self.language = language;
533 self.language_server = if let Some(server) = language_server {
534 let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
535 Some(LanguageServerState {
536 latest_snapshot: latest_snapshot_tx,
537 pending_snapshots: Default::default(),
538 next_version: 0,
539 server: server.clone(),
540 _maintain_server: cx.background().spawn(
541 async move {
542 let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
543 while let Some(snapshot) = latest_snapshot_rx.recv().await {
544 if let Some(snapshot) = snapshot {
545 let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
546 if let Some(prev_snapshot) = prev_snapshot {
547 let changes = lsp::DidChangeTextDocumentParams {
548 text_document: lsp::VersionedTextDocumentIdentifier::new(
549 uri,
550 snapshot.version as i32,
551 ),
552 content_changes: snapshot
553 .buffer_snapshot
554 .edits_since::<(PointUtf16, usize)>(
555 prev_snapshot.buffer_snapshot.version(),
556 )
557 .map(|edit| {
558 let edit_start = edit.new.start.0;
559 let edit_end = edit_start
560 + (edit.old.end.0 - edit.old.start.0);
561 let new_text = snapshot
562 .buffer_snapshot
563 .text_for_range(
564 edit.new.start.1..edit.new.end.1,
565 )
566 .collect();
567 lsp::TextDocumentContentChangeEvent {
568 range: Some(lsp::Range::new(
569 lsp::Position::new(
570 edit_start.row,
571 edit_start.column,
572 ),
573 lsp::Position::new(
574 edit_end.row,
575 edit_end.column,
576 ),
577 )),
578 range_length: None,
579 text: new_text,
580 }
581 })
582 .collect(),
583 };
584 server
585 .notify::<lsp::notification::DidChangeTextDocument>(changes)
586 .await?;
587 } else {
588 server
589 .notify::<lsp::notification::DidOpenTextDocument>(
590 lsp::DidOpenTextDocumentParams {
591 text_document: lsp::TextDocumentItem::new(
592 uri,
593 Default::default(),
594 snapshot.version as i32,
595 snapshot.buffer_snapshot.text().to_string(),
596 ),
597 },
598 )
599 .await?;
600 }
601
602 prev_snapshot = Some(snapshot);
603 }
604 }
605 Ok(())
606 }
607 .log_err(),
608 ),
609 })
610 } else {
611 None
612 };
613
614 self.reparse(cx);
615 self.update_language_server();
616 }
617
618 pub fn did_save(
619 &mut self,
620 version: clock::Global,
621 mtime: SystemTime,
622 new_file: Option<Box<dyn File>>,
623 cx: &mut ModelContext<Self>,
624 ) {
625 self.saved_mtime = mtime;
626 self.saved_version = version;
627 if let Some(new_file) = new_file {
628 self.file = Some(new_file);
629 }
630 if let Some(state) = &self.language_server {
631 cx.background()
632 .spawn(
633 state
634 .server
635 .notify::<lsp::notification::DidSaveTextDocument>(
636 lsp::DidSaveTextDocumentParams {
637 text_document: lsp::TextDocumentIdentifier {
638 uri: lsp::Url::from_file_path(
639 self.file.as_ref().unwrap().abs_path().unwrap(),
640 )
641 .unwrap(),
642 },
643 text: None,
644 },
645 ),
646 )
647 .detach()
648 }
649 cx.emit(Event::Saved);
650 }
651
652 pub fn file_updated(
653 &mut self,
654 new_file: Box<dyn File>,
655 cx: &mut ModelContext<Self>,
656 ) -> Option<Task<()>> {
657 let old_file = self.file.as_ref()?;
658 let mut file_changed = false;
659 let mut task = None;
660
661 if new_file.path() != old_file.path() {
662 file_changed = true;
663 }
664
665 if new_file.is_deleted() {
666 if !old_file.is_deleted() {
667 file_changed = true;
668 if !self.is_dirty() {
669 cx.emit(Event::Dirtied);
670 }
671 }
672 } else {
673 let new_mtime = new_file.mtime();
674 if new_mtime != old_file.mtime() {
675 file_changed = true;
676
677 if !self.is_dirty() {
678 task = Some(cx.spawn(|this, mut cx| {
679 async move {
680 let new_text = this.read_with(&cx, |this, cx| {
681 this.file.as_ref().and_then(|file| file.load_local(cx))
682 });
683 if let Some(new_text) = new_text {
684 let new_text = new_text.await?;
685 let diff = this
686 .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
687 .await;
688 this.update(&mut cx, |this, cx| {
689 if this.apply_diff(diff, cx) {
690 this.saved_version = this.version();
691 this.saved_mtime = new_mtime;
692 cx.emit(Event::Reloaded);
693 }
694 });
695 }
696 Ok(())
697 }
698 .log_err()
699 .map(drop)
700 }));
701 }
702 }
703 }
704
705 if file_changed {
706 cx.emit(Event::FileHandleChanged);
707 }
708 self.file = Some(new_file);
709 task
710 }
711
712 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
713 cx.emit(Event::Closed);
714 }
715
716 pub fn language(&self) -> Option<&Arc<Language>> {
717 self.language.as_ref()
718 }
719
720 pub fn parse_count(&self) -> usize {
721 self.parse_count
722 }
723
724 pub fn selections_update_count(&self) -> usize {
725 self.selections_update_count
726 }
727
728 pub fn diagnostics_update_count(&self) -> usize {
729 self.diagnostics_update_count
730 }
731
732 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
733 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
734 self.interpolate_tree(syntax_tree);
735 Some(syntax_tree.tree.clone())
736 } else {
737 None
738 }
739 }
740
741 #[cfg(any(test, feature = "test-support"))]
742 pub fn is_parsing(&self) -> bool {
743 self.parsing_in_background
744 }
745
746 #[cfg(test)]
747 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
748 self.sync_parse_timeout = timeout;
749 }
750
751 fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
752 if self.parsing_in_background {
753 return false;
754 }
755
756 if let Some(grammar) = self.grammar().cloned() {
757 let old_tree = self.syntax_tree();
758 let text = self.as_rope().clone();
759 let parsed_version = self.version();
760 let parse_task = cx.background().spawn({
761 let grammar = grammar.clone();
762 async move { Self::parse_text(&text, old_tree, &grammar) }
763 });
764
765 match cx
766 .background()
767 .block_with_timeout(self.sync_parse_timeout, parse_task)
768 {
769 Ok(new_tree) => {
770 self.did_finish_parsing(new_tree, parsed_version, cx);
771 return true;
772 }
773 Err(parse_task) => {
774 self.parsing_in_background = true;
775 cx.spawn(move |this, mut cx| async move {
776 let new_tree = parse_task.await;
777 this.update(&mut cx, move |this, cx| {
778 let grammar_changed = this
779 .grammar()
780 .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
781 let parse_again =
782 this.version.changed_since(&parsed_version) || grammar_changed;
783 this.parsing_in_background = false;
784 this.did_finish_parsing(new_tree, parsed_version, cx);
785
786 if parse_again && this.reparse(cx) {
787 return;
788 }
789 });
790 })
791 .detach();
792 }
793 }
794 }
795 false
796 }
797
798 fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
799 PARSER.with(|parser| {
800 let mut parser = parser.borrow_mut();
801 parser
802 .set_language(grammar.ts_language)
803 .expect("incompatible grammar");
804 let mut chunks = text.chunks_in_range(0..text.len());
805 let tree = parser
806 .parse_with(
807 &mut move |offset, _| {
808 chunks.seek(offset);
809 chunks.next().unwrap_or("").as_bytes()
810 },
811 old_tree.as_ref(),
812 )
813 .unwrap();
814 tree
815 })
816 }
817
818 fn interpolate_tree(&self, tree: &mut SyntaxTree) {
819 for edit in self.edits_since::<(usize, Point)>(&tree.version) {
820 let (bytes, lines) = edit.flatten();
821 tree.tree.edit(&InputEdit {
822 start_byte: bytes.new.start,
823 old_end_byte: bytes.new.start + bytes.old.len(),
824 new_end_byte: bytes.new.end,
825 start_position: lines.new.start.to_ts_point(),
826 old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
827 .to_ts_point(),
828 new_end_position: lines.new.end.to_ts_point(),
829 });
830 }
831 tree.version = self.version();
832 }
833
834 fn did_finish_parsing(
835 &mut self,
836 tree: Tree,
837 version: clock::Global,
838 cx: &mut ModelContext<Self>,
839 ) {
840 self.parse_count += 1;
841 *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
842 self.request_autoindent(cx);
843 cx.emit(Event::Reparsed);
844 cx.notify();
845 }
846
847 pub fn update_diagnostics<T>(
848 &mut self,
849 version: Option<i32>,
850 mut diagnostics: Vec<DiagnosticEntry<T>>,
851 cx: &mut ModelContext<Self>,
852 ) -> Result<Operation>
853 where
854 T: Copy + Ord + TextDimension + Sub<Output = T> + Clip + ToPoint,
855 {
856 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
857 Ordering::Equal
858 .then_with(|| b.is_primary.cmp(&a.is_primary))
859 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
860 .then_with(|| a.severity.cmp(&b.severity))
861 .then_with(|| a.message.cmp(&b.message))
862 }
863
864 let version = version.map(|version| version as usize);
865 let content = if let Some(version) = version {
866 let language_server = self.language_server.as_mut().unwrap();
867 language_server
868 .pending_snapshots
869 .retain(|&v, _| v >= version);
870 let snapshot = language_server
871 .pending_snapshots
872 .get(&version)
873 .ok_or_else(|| anyhow!("missing snapshot"))?;
874 &snapshot.buffer_snapshot
875 } else {
876 self.deref()
877 };
878
879 diagnostics.sort_unstable_by(|a, b| {
880 Ordering::Equal
881 .then_with(|| a.range.start.cmp(&b.range.start))
882 .then_with(|| b.range.end.cmp(&a.range.end))
883 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
884 });
885
886 let mut sanitized_diagnostics = Vec::new();
887 let mut edits_since_save = content.edits_since::<T>(&self.saved_version).peekable();
888 let mut last_edit_old_end = T::default();
889 let mut last_edit_new_end = T::default();
890 'outer: for entry in diagnostics {
891 let mut start = entry.range.start;
892 let mut end = entry.range.end;
893
894 // Some diagnostics are based on files on disk instead of buffers'
895 // current contents. Adjust these diagnostics' ranges to reflect
896 // any unsaved edits.
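            // For instance, if lines were inserted above a disk-based diagnostic after the last
            // save, its range is shifted down by the same number of rows so that it still points
            // at the same code.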
897 if entry.diagnostic.is_disk_based {
898 while let Some(edit) = edits_since_save.peek() {
899 if edit.old.end <= start {
900 last_edit_old_end = edit.old.end;
901 last_edit_new_end = edit.new.end;
902 edits_since_save.next();
903 } else if edit.old.start <= end && edit.old.end >= start {
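                        // An unsaved edit overlaps this diagnostic, so its position can no longer
                        // be adjusted reliably; drop the diagnostic entirely.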
904 continue 'outer;
905 } else {
906 break;
907 }
908 }
909
910 let start_overshoot = start - last_edit_old_end;
911 start = last_edit_new_end;
912 start.add_assign(&start_overshoot);
913
914 let end_overshoot = end - last_edit_old_end;
915 end = last_edit_new_end;
916 end.add_assign(&end_overshoot);
917 }
918
919 let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content);
920 let mut range = range.start.to_point(content)..range.end.to_point(content);
921 // Expand empty ranges by one character
922 if range.start == range.end {
923 range.end.column += 1;
924 range.end = content.clip_point(range.end, Bias::Right);
925 if range.start == range.end && range.end.column > 0 {
926 range.start.column -= 1;
927 range.start = content.clip_point(range.start, Bias::Left);
928 }
929 }
930
931 sanitized_diagnostics.push(DiagnosticEntry {
932 range,
933 diagnostic: entry.diagnostic,
934 });
935 }
936 drop(edits_since_save);
937
938 let set = DiagnosticSet::new(sanitized_diagnostics, content);
939 self.apply_diagnostic_update(set.clone(), cx);
940 Ok(Operation::UpdateDiagnostics {
941 diagnostics: set.iter().cloned().collect(),
942 lamport_timestamp: self.text.lamport_clock.tick(),
943 })
944 }
945
946 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
947 if let Some(indent_columns) = self.compute_autoindents() {
948 let indent_columns = cx.background().spawn(indent_columns);
949 match cx
950 .background()
951 .block_with_timeout(Duration::from_micros(500), indent_columns)
952 {
953 Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
954 Err(indent_columns) => {
955 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
956 let indent_columns = indent_columns.await;
957 this.update(&mut cx, |this, cx| {
958 this.apply_autoindents(indent_columns, cx);
959 });
960 }));
961 }
962 }
963 }
964 }
965
966 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
967 let max_rows_between_yields = 100;
968 let snapshot = self.snapshot();
969 if snapshot.language.is_none()
970 || snapshot.tree.is_none()
971 || self.autoindent_requests.is_empty()
972 {
973 return None;
974 }
975
976 let autoindent_requests = self.autoindent_requests.clone();
977 Some(async move {
978 let mut indent_columns = BTreeMap::new();
979 for request in autoindent_requests {
980 let old_to_new_rows = request
981 .edited
982 .iter()
983 .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
984 .zip(
985 request
986 .edited
987 .iter()
988 .map(|anchor| anchor.summary::<Point>(&snapshot).row),
989 )
990 .collect::<BTreeMap<u32, u32>>();
991
992 let mut old_suggestions = HashMap::<u32, u32>::default();
993 let old_edited_ranges =
994 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
995 for old_edited_range in old_edited_ranges {
996 let suggestions = request
997 .before_edit
998 .suggest_autoindents(old_edited_range.clone())
999 .into_iter()
1000 .flatten();
1001 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1002 let indentation_basis = old_to_new_rows
1003 .get(&suggestion.basis_row)
1004 .and_then(|from_row| old_suggestions.get(from_row).copied())
1005 .unwrap_or_else(|| {
1006 request
1007 .before_edit
1008 .indent_column_for_line(suggestion.basis_row)
1009 });
1010 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1011 old_suggestions.insert(
1012 *old_to_new_rows.get(&old_row).unwrap(),
1013 indentation_basis + delta,
1014 );
1015 }
1016 yield_now().await;
1017 }
1018
1019 // At this point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the
1020 // buffer before the edit, but keyed by the row for these lines after the edits were applied.
1021 let new_edited_row_ranges =
1022 contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
1023 for new_edited_row_range in new_edited_row_ranges {
1024 let suggestions = snapshot
1025 .suggest_autoindents(new_edited_row_range.clone())
1026 .into_iter()
1027 .flatten();
1028 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1029 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1030 let new_indentation = indent_columns
1031 .get(&suggestion.basis_row)
1032 .copied()
1033 .unwrap_or_else(|| {
1034 snapshot.indent_column_for_line(suggestion.basis_row)
1035 })
1036 + delta;
1037 if old_suggestions
1038 .get(&new_row)
1039 .map_or(true, |old_indentation| new_indentation != *old_indentation)
1040 {
1041 indent_columns.insert(new_row, new_indentation);
1042 }
1043 }
1044 yield_now().await;
1045 }
1046
1047 if let Some(inserted) = request.inserted.as_ref() {
1048 let inserted_row_ranges = contiguous_ranges(
1049 inserted
1050 .iter()
1051 .map(|range| range.to_point(&snapshot))
1052 .flat_map(|range| range.start.row..range.end.row + 1),
1053 max_rows_between_yields,
1054 );
1055 for inserted_row_range in inserted_row_ranges {
1056 let suggestions = snapshot
1057 .suggest_autoindents(inserted_row_range.clone())
1058 .into_iter()
1059 .flatten();
1060 for (row, suggestion) in inserted_row_range.zip(suggestions) {
1061 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1062 let new_indentation = indent_columns
1063 .get(&suggestion.basis_row)
1064 .copied()
1065 .unwrap_or_else(|| {
1066 snapshot.indent_column_for_line(suggestion.basis_row)
1067 })
1068 + delta;
1069 indent_columns.insert(row, new_indentation);
1070 }
1071 yield_now().await;
1072 }
1073 }
1074 }
1075 indent_columns
1076 })
1077 }
1078
1079 fn apply_autoindents(
1080 &mut self,
1081 indent_columns: BTreeMap<u32, u32>,
1082 cx: &mut ModelContext<Self>,
1083 ) {
1084 self.autoindent_requests.clear();
1085 self.start_transaction();
1086 for (row, indent_column) in &indent_columns {
1087 self.set_indent_column_for_line(*row, *indent_column, cx);
1088 }
1089 self.end_transaction(cx);
1090 }
1091
1092 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
1093 let current_column = self.indent_column_for_line(row);
1094 if column > current_column {
1095 let offset = Point::new(row, 0).to_offset(&*self);
1096 self.edit(
1097 [offset..offset],
1098 " ".repeat((column - current_column) as usize),
1099 cx,
1100 );
1101 } else if column < current_column {
1102 self.edit(
1103 [Point::new(row, 0)..Point::new(row, current_column - column)],
1104 "",
1105 cx,
1106 );
1107 }
1108 }
1109
1110 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
1111 // TODO: it would be nice to not allocate here.
1112 let old_text = self.text();
1113 let base_version = self.version();
1114 cx.background().spawn(async move {
1115 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
1116 .iter_all_changes()
1117 .map(|c| (c.tag(), c.value().len()))
1118 .collect::<Vec<_>>();
1119 Diff {
1120 base_version,
1121 new_text,
1122 changes,
1123 }
1124 })
1125 }
1126
1127 pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
1128 if self.version == diff.base_version {
1129 self.start_transaction();
1130 let mut offset = 0;
1131 for (tag, len) in diff.changes {
1132 let range = offset..(offset + len);
1133 match tag {
1134 ChangeTag::Equal => offset += len,
1135 ChangeTag::Delete => self.edit(Some(range), "", cx),
1136 ChangeTag::Insert => {
1137 self.edit(Some(offset..offset), &diff.new_text[range], cx);
1138 offset += len;
1139 }
1140 }
1141 }
1142 self.end_transaction(cx);
1143 true
1144 } else {
1145 false
1146 }
1147 }
1148
1149 pub fn is_dirty(&self) -> bool {
1150 !self.saved_version.observed_all(&self.version)
1151 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1152 }
1153
1154 pub fn has_conflict(&self) -> bool {
1155 !self.saved_version.observed_all(&self.version)
1156 && self
1157 .file
1158 .as_ref()
1159 .map_or(false, |file| file.mtime() > self.saved_mtime)
1160 }
1161
1162 pub fn subscribe(&mut self) -> Subscription {
1163 self.text.subscribe()
1164 }
1165
1166 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1167 self.start_transaction_at(Instant::now())
1168 }
1169
1170 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1171 self.text.start_transaction_at(now)
1172 }
1173
1174 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1175 self.end_transaction_at(Instant::now(), cx)
1176 }
1177
1178 pub fn end_transaction_at(
1179 &mut self,
1180 now: Instant,
1181 cx: &mut ModelContext<Self>,
1182 ) -> Option<TransactionId> {
1183 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1184 let was_dirty = start_version != self.saved_version;
1185 self.did_edit(&start_version, was_dirty, cx);
1186 Some(transaction_id)
1187 } else {
1188 None
1189 }
1190 }
1191
1192 pub fn set_active_selections(
1193 &mut self,
1194 selections: Arc<[Selection<Anchor>]>,
1195 cx: &mut ModelContext<Self>,
1196 ) {
1197 let lamport_timestamp = self.text.lamport_clock.tick();
1198 self.remote_selections.insert(
1199 self.text.replica_id(),
1200 SelectionSet {
1201 selections: selections.clone(),
1202 lamport_timestamp,
1203 },
1204 );
1205 self.send_operation(
1206 Operation::UpdateSelections {
1207 replica_id: self.text.replica_id(),
1208 selections,
1209 lamport_timestamp,
1210 },
1211 cx,
1212 );
1213 }
1214
1215 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1216 self.set_active_selections(Arc::from([]), cx);
1217 }
1218
1219 fn update_language_server(&mut self) {
1220 let language_server = if let Some(language_server) = self.language_server.as_mut() {
1221 language_server
1222 } else {
1223 return;
1224 };
1225 let abs_path = self
1226 .file
1227 .as_ref()
1228 .map_or(Path::new("/").to_path_buf(), |file| {
1229 file.abs_path().unwrap()
1230 });
1231
1232 let version = post_inc(&mut language_server.next_version);
1233 let snapshot = LanguageServerSnapshot {
1234 buffer_snapshot: self.text.snapshot(),
1235 version,
1236 path: Arc::from(abs_path),
1237 };
1238 language_server
1239 .pending_snapshots
1240 .insert(version, snapshot.clone());
1241 let _ = language_server
1242 .latest_snapshot
1243 .blocking_send(Some(snapshot));
1244 }
1245
1246 pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
1247 where
1248 I: IntoIterator<Item = Range<S>>,
1249 S: ToOffset,
1250 T: Into<String>,
1251 {
1252 self.edit_internal(ranges_iter, new_text, false, cx)
1253 }
1254
1255 pub fn edit_with_autoindent<I, S, T>(
1256 &mut self,
1257 ranges_iter: I,
1258 new_text: T,
1259 cx: &mut ModelContext<Self>,
1260 ) where
1261 I: IntoIterator<Item = Range<S>>,
1262 S: ToOffset,
1263 T: Into<String>,
1264 {
1265 self.edit_internal(ranges_iter, new_text, true, cx)
1266 }
1267
1274
1275 pub fn edit_internal<I, S, T>(
1276 &mut self,
1277 ranges_iter: I,
1278 new_text: T,
1279 autoindent: bool,
1280 cx: &mut ModelContext<Self>,
1281 ) where
1282 I: IntoIterator<Item = Range<S>>,
1283 S: ToOffset,
1284 T: Into<String>,
1285 {
1286 let new_text = new_text.into();
1287
1288 // Skip invalid ranges and coalesce contiguous ones.
1289 let mut ranges: Vec<Range<usize>> = Vec::new();
1290 for range in ranges_iter {
1291 let range = range.start.to_offset(self)..range.end.to_offset(self);
1292 if !new_text.is_empty() || !range.is_empty() {
1293 if let Some(prev_range) = ranges.last_mut() {
1294 if prev_range.end >= range.start {
1295 prev_range.end = cmp::max(prev_range.end, range.end);
1296 } else {
1297 ranges.push(range);
1298 }
1299 } else {
1300 ranges.push(range);
1301 }
1302 }
1303 }
1304 if ranges.is_empty() {
1305 return;
1306 }
1307
1308 self.start_transaction();
1309 self.pending_autoindent.take();
1310 let autoindent_request = if autoindent && self.language.is_some() {
1311 let before_edit = self.snapshot();
1312 let edited = ranges
1313 .iter()
1314 .filter_map(|range| {
1315 let start = range.start.to_point(self);
1316 if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
1317 None
1318 } else {
1319 Some(self.anchor_before(range.start))
1320 }
1321 })
1322 .collect();
1323 Some((before_edit, edited))
1324 } else {
1325 None
1326 };
1327
1328 let first_newline_ix = new_text.find('\n');
1329 let new_text_len = new_text.len();
1330
1331 let edit = self.text.edit(ranges.iter().cloned(), new_text);
1332
1333 if let Some((before_edit, edited)) = autoindent_request {
1334 let mut inserted = None;
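            // When the new text spans multiple lines, also record the ranges of the newly
            // inserted lines (everything after the first newline) so that they can be
            // auto-indented along with the edited rows.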
1335 if let Some(first_newline_ix) = first_newline_ix {
1336 let mut delta = 0isize;
1337 inserted = Some(
1338 ranges
1339 .iter()
1340 .map(|range| {
1341 let start =
1342 (delta + range.start as isize) as usize + first_newline_ix + 1;
1343 let end = (delta + range.start as isize) as usize + new_text_len;
1344 delta +=
1345 (range.end as isize - range.start as isize) + new_text_len as isize;
1346 self.anchor_before(start)..self.anchor_after(end)
1347 })
1348 .collect(),
1349 );
1350 }
1351
1352 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1353 before_edit,
1354 edited,
1355 inserted,
1356 }));
1357 }
1358
1359 self.end_transaction(cx);
1360 self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
1361 }
1362
1363 fn did_edit(
1364 &mut self,
1365 old_version: &clock::Global,
1366 was_dirty: bool,
1367 cx: &mut ModelContext<Self>,
1368 ) {
1369 if self.edits_since::<usize>(old_version).next().is_none() {
1370 return;
1371 }
1372
1373 self.reparse(cx);
1374 self.update_language_server();
1375
1376 cx.emit(Event::Edited);
1377 if !was_dirty {
1378 cx.emit(Event::Dirtied);
1379 }
1380 cx.notify();
1381 }
1382
1383 fn grammar(&self) -> Option<&Arc<Grammar>> {
1384 self.language.as_ref().and_then(|l| l.grammar.as_ref())
1385 }
1386
1387 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1388 &mut self,
1389 ops: I,
1390 cx: &mut ModelContext<Self>,
1391 ) -> Result<()> {
1392 self.pending_autoindent.take();
1393 let was_dirty = self.is_dirty();
1394 let old_version = self.version.clone();
1395 let mut deferred_ops = Vec::new();
1396 let buffer_ops = ops
1397 .into_iter()
1398 .filter_map(|op| match op {
1399 Operation::Buffer(op) => Some(op),
1400 _ => {
1401 if self.can_apply_op(&op) {
1402 self.apply_op(op, cx);
1403 } else {
1404 deferred_ops.push(op);
1405 }
1406 None
1407 }
1408 })
1409 .collect::<Vec<_>>();
1410 self.text.apply_ops(buffer_ops)?;
1411 self.deferred_ops.insert(deferred_ops);
1412 self.flush_deferred_ops(cx);
1413 self.did_edit(&old_version, was_dirty, cx);
1414 // Notify independently of whether the buffer was edited as the operations could include a
1415 // selection update.
1416 cx.notify();
1417 Ok(())
1418 }
1419
1420 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1421 let mut deferred_ops = Vec::new();
1422 for op in self.deferred_ops.drain().iter().cloned() {
1423 if self.can_apply_op(&op) {
1424 self.apply_op(op, cx);
1425 } else {
1426 deferred_ops.push(op);
1427 }
1428 }
1429 self.deferred_ops.insert(deferred_ops);
1430 }
1431
1432 fn can_apply_op(&self, operation: &Operation) -> bool {
1433 match operation {
1434 Operation::Buffer(_) => {
1435 unreachable!("buffer operations should never be applied at this layer")
1436 }
1437 Operation::UpdateDiagnostics {
1438 diagnostics: diagnostic_set,
1439 ..
1440 } => diagnostic_set.iter().all(|diagnostic| {
1441 self.text.can_resolve(&diagnostic.range.start)
1442 && self.text.can_resolve(&diagnostic.range.end)
1443 }),
1444 Operation::UpdateSelections { selections, .. } => selections
1445 .iter()
1446 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1447 }
1448 }
1449
1450 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1451 match operation {
1452 Operation::Buffer(_) => {
1453 unreachable!("buffer operations should never be applied at this layer")
1454 }
1455 Operation::UpdateDiagnostics {
1456 diagnostics: diagnostic_set,
1457 ..
1458 } => {
1459 let snapshot = self.snapshot();
1460 self.apply_diagnostic_update(
1461 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1462 cx,
1463 );
1464 }
1465 Operation::UpdateSelections {
1466 replica_id,
1467 selections,
1468 lamport_timestamp,
1469 } => {
1470 if let Some(set) = self.remote_selections.get(&replica_id) {
1471 if set.lamport_timestamp > lamport_timestamp {
1472 return;
1473 }
1474 }
1475
1476 self.remote_selections.insert(
1477 replica_id,
1478 SelectionSet {
1479 selections,
1480 lamport_timestamp,
1481 },
1482 );
1483 self.text.lamport_clock.observe(lamport_timestamp);
1484 self.selections_update_count += 1;
1485 }
1486 }
1487 }
1488
1489 fn apply_diagnostic_update(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext<Self>) {
1490 self.diagnostics = diagnostics;
1491 self.diagnostics_update_count += 1;
1492 cx.notify();
1493 cx.emit(Event::DiagnosticsUpdated);
1494 }
1495
1496 #[cfg(not(test))]
1497 pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1498 if let Some(file) = &self.file {
1499 file.buffer_updated(self.remote_id(), operation, cx.as_mut());
1500 }
1501 }
1502
1503 #[cfg(test)]
1504 pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
1505 self.operations.push(operation);
1506 }
1507
1508 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1509 self.remote_selections.remove(&replica_id);
1510 cx.notify();
1511 }
1512
1513 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1514 let was_dirty = self.is_dirty();
1515 let old_version = self.version.clone();
1516
1517 if let Some((transaction_id, operation)) = self.text.undo() {
1518 self.send_operation(Operation::Buffer(operation), cx);
1519 self.did_edit(&old_version, was_dirty, cx);
1520 Some(transaction_id)
1521 } else {
1522 None
1523 }
1524 }
1525
1526 pub fn undo_transaction(
1527 &mut self,
1528 transaction_id: TransactionId,
1529 cx: &mut ModelContext<Self>,
1530 ) -> bool {
1531 let was_dirty = self.is_dirty();
1532 let old_version = self.version.clone();
1533
1534 if let Some(operation) = self.text.undo_transaction(transaction_id) {
1535 self.send_operation(Operation::Buffer(operation), cx);
1536 self.did_edit(&old_version, was_dirty, cx);
1537 true
1538 } else {
1539 false
1540 }
1541 }
1542
1543 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1544 let was_dirty = self.is_dirty();
1545 let old_version = self.version.clone();
1546
1547 if let Some((transaction_id, operation)) = self.text.redo() {
1548 self.send_operation(Operation::Buffer(operation), cx);
1549 self.did_edit(&old_version, was_dirty, cx);
1550 Some(transaction_id)
1551 } else {
1552 None
1553 }
1554 }
1555
1556 pub fn redo_transaction(
1557 &mut self,
1558 transaction_id: TransactionId,
1559 cx: &mut ModelContext<Self>,
1560 ) -> bool {
1561 let was_dirty = self.is_dirty();
1562 let old_version = self.version.clone();
1563
1564 if let Some(operation) = self.text.redo_transaction(transaction_id) {
1565 self.send_operation(Operation::Buffer(operation), cx);
1566 self.did_edit(&old_version, was_dirty, cx);
1567 true
1568 } else {
1569 false
1570 }
1571 }
1572}
1573
1574#[cfg(any(test, feature = "test-support"))]
1575impl Buffer {
1576 pub fn set_group_interval(&mut self, group_interval: Duration) {
1577 self.text.set_group_interval(group_interval);
1578 }
1579
1580 pub fn randomly_edit<T>(
1581 &mut self,
1582 rng: &mut T,
1583 old_range_count: usize,
1584 cx: &mut ModelContext<Self>,
1585 ) where
1586 T: rand::Rng,
1587 {
1588 let mut old_ranges: Vec<Range<usize>> = Vec::new();
1589 for _ in 0..old_range_count {
1590 let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1);
1591 if last_end > self.len() {
1592 break;
1593 }
1594 old_ranges.push(self.text.random_byte_range(last_end, rng));
1595 }
1596 let new_text_len = rng.gen_range(0..10);
1597 let new_text: String = crate::random_char_iter::RandomCharIter::new(&mut *rng)
1598 .take(new_text_len)
1599 .collect();
1600 log::info!(
1601 "mutating buffer {} at {:?}: {:?}",
1602 self.replica_id(),
1603 old_ranges,
1604 new_text
1605 );
1606 self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx);
1607 }
1608
1609 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1610 let was_dirty = self.is_dirty();
1611 let old_version = self.version.clone();
1612
1613 let ops = self.text.randomly_undo_redo(rng);
1614 if !ops.is_empty() {
1615 for op in ops {
1616 self.send_operation(Operation::Buffer(op), cx);
1617 self.did_edit(&old_version, was_dirty, cx);
1618 }
1619 }
1620 }
1621}
1622
1623impl Entity for Buffer {
1624 type Event = Event;
1625
1626 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
1627 if let Some(file) = self.file.as_ref() {
1628 file.buffer_removed(self.remote_id(), cx);
1629 }
1630 }
1631}
1632
1633impl Deref for Buffer {
1634 type Target = TextBuffer;
1635
1636 fn deref(&self) -> &Self::Target {
1637 &self.text
1638 }
1639}
1640
1641impl BufferSnapshot {
1642 fn suggest_autoindents<'a>(
1643 &'a self,
1644 row_range: Range<u32>,
1645 ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
1646 let mut query_cursor = QueryCursorHandle::new();
1647 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1648 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1649
1650 // Get the "indentation ranges" that intersect this row range.
1651 let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
1652 let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
1653 query_cursor.set_point_range(
1654 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
1655 ..Point::new(row_range.end, 0).to_ts_point(),
1656 );
1657 let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
1658 for mat in query_cursor.matches(
1659 &grammar.indents_query,
1660 tree.root_node(),
1661 TextProvider(self.as_rope()),
1662 ) {
1663 let mut node_kind = "";
1664 let mut start: Option<Point> = None;
1665 let mut end: Option<Point> = None;
1666 for capture in mat.captures {
1667 if Some(capture.index) == indent_capture_ix {
1668 node_kind = capture.node.kind();
1669 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1670 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1671 } else if Some(capture.index) == end_capture_ix {
1672 end = Some(Point::from_ts_point(capture.node.start_position().into()));
1673 }
1674 }
1675
1676 if let Some((start, end)) = start.zip(end) {
1677 if start.row == end.row {
1678 continue;
1679 }
1680
1681 let range = start..end;
1682 match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
1683 Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
1684 Ok(ix) => {
1685 let prev_range = &mut indentation_ranges[ix];
1686 prev_range.0.end = prev_range.0.end.max(range.end);
1687 }
1688 }
1689 }
1690 }
1691
1692 let mut prev_row = prev_non_blank_row.unwrap_or(0);
1693 Some(row_range.map(move |row| {
1694 let row_start = Point::new(row, self.indent_column_for_line(row));
1695
1696 let mut indent_from_prev_row = false;
1697 let mut outdent_to_row = u32::MAX;
1698 for (range, _node_kind) in &indentation_ranges {
1699 if range.start.row >= row {
1700 break;
1701 }
1702
1703 if range.start.row == prev_row && range.end > row_start {
1704 indent_from_prev_row = true;
1705 }
1706 if range.end.row >= prev_row && range.end <= row_start {
1707 outdent_to_row = outdent_to_row.min(range.start.row);
1708 }
1709 }
1710
1711 let suggestion = if outdent_to_row == prev_row {
1712 IndentSuggestion {
1713 basis_row: prev_row,
1714 indent: false,
1715 }
1716 } else if indent_from_prev_row {
1717 IndentSuggestion {
1718 basis_row: prev_row,
1719 indent: true,
1720 }
1721 } else if outdent_to_row < prev_row {
1722 IndentSuggestion {
1723 basis_row: outdent_to_row,
1724 indent: false,
1725 }
1726 } else {
1727 IndentSuggestion {
1728 basis_row: prev_row,
1729 indent: false,
1730 }
1731 };
1732
1733 prev_row = row;
1734 suggestion
1735 }))
1736 } else {
1737 None
1738 }
1739 }
1740
1741 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
1742 while row > 0 {
1743 row -= 1;
1744 if !self.is_line_blank(row) {
1745 return Some(row);
1746 }
1747 }
1748 None
1749 }
1750
1751 pub fn chunks<'a, T: ToOffset>(
1752 &'a self,
1753 range: Range<T>,
1754 theme: Option<&'a SyntaxTheme>,
1755 ) -> BufferChunks<'a> {
1756 let range = range.start.to_offset(self)..range.end.to_offset(self);
1757
1758 let mut highlights = None;
1759 let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
1760 if let Some(theme) = theme {
1761 for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
1762 diagnostic_endpoints.push(DiagnosticEndpoint {
1763 offset: entry.range.start,
1764 is_start: true,
1765 severity: entry.diagnostic.severity,
1766 });
1767 diagnostic_endpoints.push(DiagnosticEndpoint {
1768 offset: entry.range.end,
1769 is_start: false,
1770 severity: entry.diagnostic.severity,
1771 });
1772 }
1773 diagnostic_endpoints
1774 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
1775
1776 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1777 let mut query_cursor = QueryCursorHandle::new();
1778
1779 // TODO - add a Tree-sitter API to remove the need for this.
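                // The captures iterator borrows the query cursor, so the borrow is extended to
                // 'static here and the cursor itself is kept alive for just as long by storing it
                // in `BufferChunkHighlights::_query_cursor` next to the iterator.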
1780 let cursor = unsafe {
1781 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
1782 };
1783 let captures = cursor.set_byte_range(range.clone()).captures(
1784 &grammar.highlights_query,
1785 tree.root_node(),
1786 TextProvider(self.text.as_rope()),
1787 );
1788 highlights = Some(BufferChunkHighlights {
1789 captures,
1790 next_capture: None,
1791 stack: Default::default(),
1792 highlight_map: grammar.highlight_map(),
1793 _query_cursor: query_cursor,
1794 theme,
1795 })
1796 }
1797 }
1798
1799 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
1800 let chunks = self.text.as_rope().chunks_in_range(range.clone());
1801
1802 BufferChunks {
1803 range,
1804 chunks,
1805 diagnostic_endpoints,
1806 error_depth: 0,
1807 warning_depth: 0,
1808 information_depth: 0,
1809 hint_depth: 0,
1810 highlights,
1811 }
1812 }
1813
1814 pub fn language(&self) -> Option<&Arc<Language>> {
1815 self.language.as_ref()
1816 }
1817
1818 fn grammar(&self) -> Option<&Arc<Grammar>> {
1819 self.language
1820 .as_ref()
1821 .and_then(|language| language.grammar.as_ref())
1822 }
1823
1824 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
1825 if let Some(tree) = self.tree.as_ref() {
1826 let root = tree.root_node();
1827 let range = range.start.to_offset(self)..range.end.to_offset(self);
1828 let mut node = root.descendant_for_byte_range(range.start, range.end);
1829 while node.map_or(false, |n| n.byte_range() == range) {
1830 node = node.unwrap().parent();
1831 }
1832 node.map(|n| n.byte_range())
1833 } else {
1834 None
1835 }
1836 }
1837
1838 pub fn outline(&self) -> Option<Outline> {
1839 let tree = self.tree.as_ref()?;
1840 let grammar = self
1841 .language
1842 .as_ref()
1843 .and_then(|language| language.grammar.as_ref())?;
1844
1845 let mut cursor = QueryCursorHandle::new();
1846 let matches = cursor.matches(
1847 &grammar.outline_query,
1848 tree.root_node(),
1849 TextProvider(self.as_rope()),
1850 );
1851
1852 let item_capture_ix = grammar.outline_query.capture_index_for_name("item")?;
1853 let context_capture_ix = grammar.outline_query.capture_index_for_name("context")?;
1854 let name_capture_ix = grammar.outline_query.capture_index_for_name("name")?;
1855
1856 let mut stack: Vec<Range<usize>> = Default::default();
1857 let mut id = 0;
1858 let items = matches
1859 .filter_map(|mat| {
1860 let item_node = mat.nodes_for_capture_index(item_capture_ix).next()?;
1861 let mut name_node = Some(mat.nodes_for_capture_index(name_capture_ix).next()?);
1862 let mut context_nodes = mat.nodes_for_capture_index(context_capture_ix).peekable();
1863
1864 let id = post_inc(&mut id);
1865 let range = item_node.start_byte()..item_node.end_byte();
1866
1867 let mut text = String::new();
1868 let mut name_range_in_text = 0..0;
1869 loop {
1870 let node;
1871 let node_is_name;
1872 match (context_nodes.peek(), name_node.as_ref()) {
1873 (None, None) => break,
1874 (None, Some(_)) => {
1875 node = name_node.take().unwrap();
1876 node_is_name = true;
1877 }
1878 (Some(_), None) => {
1879 node = context_nodes.next().unwrap();
1880 node_is_name = false;
1881 }
1882 (Some(context_node), Some(name)) => {
1883 if context_node.start_byte() < name.start_byte() {
1884 node = context_nodes.next().unwrap();
1885 node_is_name = false;
1886 } else {
1887 node = name_node.take().unwrap();
1888 node_is_name = true;
1889 }
1890 }
1891 }
1892
1893 if !text.is_empty() {
1894 text.push(' ');
1895 }
1896 let range = node.start_byte()..node.end_byte();
1897 if node_is_name {
1898 name_range_in_text = text.len()..(text.len() + range.len())
1899 }
1900 text.extend(self.text_for_range(range));
1901 }
1902
1903 while stack.last().map_or(false, |prev_range| {
1904 !prev_range.contains(&range.start) || !prev_range.contains(&range.end)
1905 }) {
1906 stack.pop();
1907 }
1908 stack.push(range.clone());
1909
1910 Some(OutlineItem {
1911 id,
1912 depth: stack.len() - 1,
1913 range,
1914 text,
1915 name_range_in_text,
1916 })
1917 })
1918 .collect::<Vec<_>>();
1919
1920 Some(Outline(items))
1921 }
1922
1923 pub fn enclosing_bracket_ranges<T: ToOffset>(
1924 &self,
1925 range: Range<T>,
1926 ) -> Option<(Range<usize>, Range<usize>)> {
1927 let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
1928 let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
1929 let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
1930
1931 // Find bracket pairs that *inclusively* contain the given range.
1932 let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
1933 let mut cursor = QueryCursorHandle::new();
1934 let matches = cursor.set_byte_range(range).matches(
1935 &grammar.brackets_query,
1936 tree.root_node(),
1937 TextProvider(self.as_rope()),
1938 );
1939
1940 // Get the ranges of the innermost pair of brackets.
1941 matches
1942 .filter_map(|mat| {
1943 let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
1944 let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
1945 Some((open.byte_range(), close.byte_range()))
1946 })
1947 .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
1948 }
1949
1955
1956 pub fn remote_selections_in_range<'a>(
1957 &'a self,
1958 range: Range<Anchor>,
1959 ) -> impl 'a + Iterator<Item = (ReplicaId, impl 'a + Iterator<Item = &'a Selection<Anchor>>)>
1960 {
1961 self.remote_selections
1962 .iter()
1963 .filter(|(replica_id, set)| {
1964 **replica_id != self.text.replica_id() && !set.selections.is_empty()
1965 })
1966 .map(move |(replica_id, set)| {
1967 let start_ix = match set.selections.binary_search_by(|probe| {
1968 probe
1969 .end
1970 .cmp(&range.start, self)
1971 .unwrap()
1972 .then(Ordering::Greater)
1973 }) {
1974 Ok(ix) | Err(ix) => ix,
1975 };
1976 let end_ix = match set.selections.binary_search_by(|probe| {
1977 probe
1978 .start
1979 .cmp(&range.end, self)
1980 .unwrap()
1981 .then(Ordering::Less)
1982 }) {
1983 Ok(ix) | Err(ix) => ix,
1984 };
1985
1986 (*replica_id, set.selections[start_ix..end_ix].iter())
1987 })
1988 }
1989
1990 pub fn diagnostics_in_range<'a, T, O>(
1991 &'a self,
1992 search_range: Range<T>,
1993 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
1994 where
1995 T: 'a + Clone + ToOffset,
1996 O: 'a + FromAnchor,
1997 {
1998 self.diagnostics.range(search_range.clone(), self, true)
1999 }
2000
2001 pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
2002 let mut groups = Vec::new();
2003 self.diagnostics.groups(&mut groups, self);
2004 groups
2005 }
2006
    pub fn diagnostic_group<'a, O>(
        &'a self,
        group_id: usize,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics.group(group_id, self)
    }

    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    pub fn parse_count(&self) -> usize {
        self.parse_count
    }

    pub fn selections_update_count(&self) -> usize {
        self.selections_update_count
    }
}

impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            tree: self.tree.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            selections_update_count: self.selections_update_count,
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.is_parsing,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }
}

impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        ByteChunks(self.0.chunks_in_range(node.byte_range()))
    }
}

struct ByteChunks<'a>(rope::Chunks<'a>);

impl<'a> Iterator for ByteChunks<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(str::as_bytes)
    }
}

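// SAFETY: this is an assumed invariant rather than a compiler-checked guarantee. A
// `BufferChunks` iterator may hold tree-sitter query state that is not automatically
// `Send`; marking it `Send` relies on the iterator only ever being used by one thread
// at a time.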
unsafe impl<'a> Send for BufferChunks<'a> {}

impl<'a> BufferChunks<'a> {
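    /// Repositions the iterator so that the next chunk starts at `offset`. Highlight
    /// spans that end at or before `offset` are dropped from the stack, and a pending
    /// capture is re-applied if it still covers the new position.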
    pub fn seek(&mut self, offset: usize) {
        self.range.start = offset;
        self.chunks.seek(self.range.start);
        if let Some(highlights) = self.highlights.as_mut() {
            highlights
                .stack
                .retain(|(end_offset, _)| *end_offset > offset);
            if let Some((mat, capture_ix)) = &highlights.next_capture {
                let capture = mat.captures[*capture_ix as usize];
                if offset >= capture.node.start_byte() {
                    let next_capture_end = capture.node.end_byte();
                    if offset < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_map.get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            }
            highlights.captures.set_byte_range(self.range.clone());
        }
    }

    pub fn offset(&self) -> usize {
        self.range.start
    }

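    /// Diagnostic ranges can nest and overlap, so the iterator tracks how many ranges
    /// of each severity are currently "open" at the cursor position. A start endpoint
    /// increments the matching depth counter and an end endpoint decrements it;
    /// `current_diagnostic_severity` then reports the most severe level whose depth is
    /// still positive.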
    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }
    }

    fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

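    // Each chunk is cut at the nearest upcoming boundary: the end of the current rope
    // chunk, the start of the next highlight capture, the end of the innermost enclosing
    // capture, or the next diagnostic endpoint. This keeps every returned `Chunk`
    // uniform in highlight style and diagnostic severity.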
    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
                let capture = mat.captures[*capture_ix as usize];
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id = highlights.highlight_map.get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
            if endpoint.offset <= self.range.start {
                self.update_diagnostic_depths(endpoint);
                self.diagnostic_endpoints.next();
            } else {
                next_diagnostic_endpoint = endpoint.offset;
                break;
            }
        }

        if let Some(chunk) = self.chunks.peek() {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_style = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
                    highlight_style = parent_highlight_id.style(highlights.theme);
                }
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                highlight_style,
                diagnostic: self.current_diagnostic_severity(),
            })
        } else {
            None
        }
    }
}

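// Tree-sitter query cursors are pooled in the global `QUERY_CURSORS` list so they can
// be reused across queries instead of being re-allocated. `Drop` resets the cursor's
// byte and point ranges before returning it to the pool.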
impl QueryCursorHandle {
    pub(crate) fn new() -> Self {
        QueryCursorHandle(Some(
            QUERY_CURSORS
                .lock()
                .pop()
                .unwrap_or_else(QueryCursor::new),
        ))
    }
}

impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        self.0.as_ref().unwrap()
    }
}

impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.0.as_mut().unwrap()
    }
}

impl Drop for QueryCursorHandle {
    fn drop(&mut self) {
        let mut cursor = self.0.take().unwrap();
        cursor.set_byte_range(0..usize::MAX);
        cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
        QUERY_CURSORS.lock().push(cursor)
    }
}

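/// Conversions between the text crate's `Point` (u32 row/column) and
/// `tree_sitter::Point` (usize row/column).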
trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}

impl ToTreeSitterPoint for Point {
    fn to_ts_point(self) -> tree_sitter::Point {
        tree_sitter::Point::new(self.row as usize, self.column as usize)
    }

    fn from_ts_point(point: tree_sitter::Point) -> Self {
        Point::new(point.row as u32, point.column as u32)
    }
}

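// Only non-text operations (selection and diagnostic updates) pass through this
// deferred-operation queue; text operations are applied by the underlying text buffer,
// so reaching the `Operation::Buffer` arm here would be a logic error.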
impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            code: Default::default(),
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            group_id: Default::default(),
            is_primary: Default::default(),
            is_valid: true,
            is_disk_based: false,
        }
    }
}

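/// Coalesces consecutive values in the input into contiguous ranges, capping each
/// range at `max_len` elements.
///
/// For example, the values `[1, 2, 3, 5, 6, 10]` with `max_len == 3` yield the ranges
/// `1..4`, `5..7`, and `10..11`.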
pub fn contiguous_ranges(
    values: impl IntoIterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values.into_iter();
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}