pub use crate::{
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
    PLAIN_TEXT,
};
use crate::{
    diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
    outline::OutlineItem,
    range_from_lsp, Outline, ToLspPosition,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
use lsp::LanguageServer;
use parking_lot::Mutex;
use postage::{prelude::Stream, sink::Sink, watch};
use similar::{ChangeTag, TextDiff};
use smol::future::yield_now;
use std::{
    any::Any,
    cell::RefCell,
    cmp::{self, Ordering},
    collections::{BTreeMap, HashMap},
    ffi::OsString,
    future::Future,
    iter::{Iterator, Peekable},
    ops::{Deref, DerefMut, Range, Sub},
    path::{Path, PathBuf},
    str,
    sync::Arc,
    time::{Duration, Instant, SystemTime, UNIX_EPOCH},
    vec,
};
use sum_tree::TreeMap;
use text::{operation_queue::OperationQueue, rope::TextDimension};
pub use text::{Buffer as TextBuffer, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
use util::{post_inc, TryFutureExt as _};

#[cfg(any(test, feature = "test-support"))]
pub use tree_sitter_rust;

pub use lsp::DiagnosticSeverity;

thread_local! {
    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}

lazy_static! {
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}

// TODO - Make this configurable
const INDENT_SIZE: u32 = 4;

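/// An in-memory buffer of text: the text itself plus its backing file, syntax
/// tree, diagnostics, remote collaborators' selections, and an optional
/// connection to a language server.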
pub struct Buffer {
    text: TextBuffer,
    file: Option<Box<dyn File>>,
    saved_version: clock::Global,
    saved_mtime: SystemTime,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_tree: Mutex<Option<SyntaxTree>>,
    parsing_in_background: bool,
    parse_count: usize,
    diagnostics: DiagnosticSet,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    selections_update_count: usize,
    diagnostics_update_count: usize,
    language_server: Option<LanguageServerState>,
    deferred_ops: OperationQueue<Operation>,
    #[cfg(test)]
    pub(crate) operations: Vec<Operation>,
}

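/// An immutable view of a buffer's state at a point in time: its text, syntax
/// tree, diagnostics, and remote selections.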
pub struct BufferSnapshot {
    text: text::BufferSnapshot,
    tree: Option<Tree>,
    diagnostics: DiagnosticSet,
    diagnostics_update_count: usize,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    selections_update_count: usize,
    is_parsing: bool,
    language: Option<Arc<Language>>,
    parse_count: usize,
}

#[derive(Clone, Debug)]
struct SelectionSet {
    selections: Arc<[Selection<Anchor>]>,
    lamport_timestamp: clock::Lamport,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct GroupId {
    source: Arc<str>,
    id: usize,
}

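/// A single diagnostic reported for a range of the buffer. Diagnostics sharing a
/// `group_id` belong to the same logical problem, with `is_primary` marking the
/// main location. `is_disk_based` indicates the diagnostic was produced against
/// the file's contents on disk rather than the buffer's current contents.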
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Diagnostic {
    pub code: Option<String>,
    pub severity: DiagnosticSeverity,
    pub message: String,
    pub group_id: usize,
    pub is_valid: bool,
    pub is_primary: bool,
    pub is_disk_based: bool,
}

#[derive(Clone, Debug)]
pub struct Completion<T> {
    pub old_range: Range<T>,
    pub new_text: String,
    pub lsp_completion: lsp::CompletionItem,
}

struct LanguageServerState {
    server: Arc<LanguageServer>,
    latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
    pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
    next_version: usize,
    _maintain_server: Task<Option<()>>,
}

#[derive(Clone)]
struct LanguageServerSnapshot {
    buffer_snapshot: text::BufferSnapshot,
    version: usize,
    path: Arc<Path>,
}

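/// A change to the buffer that can be serialized and applied on other replicas:
/// a text operation, a diagnostics update, or a selections update.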
#[derive(Clone, Debug)]
pub enum Operation {
    Buffer(text::Operation),
    UpdateDiagnostics {
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
    UpdateSelections {
        replica_id: ReplicaId,
        selections: Arc<[Selection<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Event {
    Edited,
    Dirtied,
    Saved,
    FileHandleChanged,
    Reloaded,
    Reparsed,
    DiagnosticsUpdated,
    Closed,
}

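/// The file backing a buffer, which may or may not be present on the local
/// filesystem (see `LocalFile`).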
pub trait File {
    fn as_local(&self) -> Option<&dyn LocalFile>;

    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    fn mtime(&self) -> SystemTime;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<Path>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &AppContext) -> PathBuf;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name(&self, cx: &AppContext) -> OsString;

    fn is_deleted(&self) -> bool;

    fn save(
        &self,
        buffer_id: u64,
        text: Rope,
        version: clock::Global,
        cx: &mut MutableAppContext,
    ) -> Task<Result<(clock::Global, SystemTime)>>;

    fn format_remote(&self, buffer_id: u64, cx: &mut MutableAppContext)
        -> Option<Task<Result<()>>>;

    fn completions(
        &self,
        buffer_id: u64,
        position: Anchor,
        cx: &mut MutableAppContext,
    ) -> Task<Result<Vec<Completion<Anchor>>>>;

    fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);

    fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);

    fn as_any(&self) -> &dyn Any;

    fn to_proto(&self) -> rpc::proto::File;
}

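/// A `File` that exists on the local filesystem and can be loaded directly.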
pub trait LocalFile: File {
    /// Returns the absolute path of this file.
    fn abs_path(&self, cx: &AppContext) -> PathBuf;

    fn load(&self, cx: &AppContext) -> Task<Result<String>>;

    fn buffer_reloaded(
        &self,
        buffer_id: u64,
        version: &clock::Global,
        mtime: SystemTime,
        cx: &mut MutableAppContext,
    );
}

229#[cfg(feature = "test-support")]
230pub struct FakeFile {
231 pub path: Arc<Path>,
232}
233
234#[cfg(feature = "test-support")]
235impl File for FakeFile {
236 fn as_local(&self) -> Option<&dyn LocalFile> {
237 Some(self)
238 }
239
240 fn mtime(&self) -> SystemTime {
241 SystemTime::UNIX_EPOCH
242 }
243
244 fn path(&self) -> &Arc<Path> {
245 &self.path
246 }
247
248 fn full_path(&self, _: &AppContext) -> PathBuf {
249 self.path.to_path_buf()
250 }
251
252 fn file_name(&self, _: &AppContext) -> OsString {
253 self.path.file_name().unwrap().to_os_string()
254 }
255
256 fn is_deleted(&self) -> bool {
257 false
258 }
259
260 fn save(
261 &self,
262 _: u64,
263 _: Rope,
264 _: clock::Global,
265 cx: &mut MutableAppContext,
266 ) -> Task<Result<(clock::Global, SystemTime)>> {
267 cx.spawn(|_| async move { Ok((Default::default(), SystemTime::UNIX_EPOCH)) })
268 }
269
270 fn format_remote(&self, _: u64, _: &mut MutableAppContext) -> Option<Task<Result<()>>> {
271 None
272 }
273
274 fn completions(
275 &self,
276 _: u64,
277 _: Anchor,
278 _: &mut MutableAppContext,
279 ) -> Task<Result<Vec<Completion<Anchor>>>> {
280 Task::ready(Ok(Default::default()))
281 }
282
283 fn buffer_updated(&self, _: u64, _: Operation, _: &mut MutableAppContext) {}
284
285 fn buffer_removed(&self, _: u64, _: &mut MutableAppContext) {}
286
287 fn as_any(&self) -> &dyn Any {
288 self
289 }
290
291 fn to_proto(&self) -> rpc::proto::File {
292 unimplemented!()
293 }
294}
295
296#[cfg(feature = "test-support")]
297impl LocalFile for FakeFile {
298 fn abs_path(&self, _: &AppContext) -> PathBuf {
299 self.path.to_path_buf()
300 }
301
302 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
303 cx.background().spawn(async move { Ok(Default::default()) })
304 }
305
306 fn buffer_reloaded(&self, _: u64, _: &clock::Global, _: SystemTime, _: &mut MutableAppContext) {
307 }
308}
309
pub(crate) struct QueryCursorHandle(Option<QueryCursor>);

#[derive(Clone)]
struct SyntaxTree {
    tree: Tree,
    version: clock::Global,
}

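/// A request to recompute indentation for rows touched by an edit, captured
/// against a snapshot of the buffer as it was before the edit.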
#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    edited: Vec<Anchor>,
    inserted: Option<Vec<Range<Anchor>>>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    indent: bool,
}

struct TextProvider<'a>(&'a Rope);

struct BufferChunkHighlights<'a> {
    captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
    next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
    stack: Vec<(usize, HighlightId)>,
    highlight_map: HighlightMap,
    theme: &'a SyntaxTheme,
    _query_cursor: QueryCursorHandle,
}

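/// An iterator over a range of a buffer's text that yields chunks annotated with
/// a highlight style and a diagnostic severity.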
pub struct BufferChunks<'a> {
    range: Range<usize>,
    chunks: rope::Chunks<'a>,
    diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    highlights: Option<BufferChunkHighlights<'a>>,
}

#[derive(Clone, Copy, Debug, Default)]
pub struct Chunk<'a> {
    pub text: &'a str,
    pub highlight_style: Option<HighlightStyle>,
    pub diagnostic: Option<DiagnosticSeverity>,
}

pub(crate) struct Diff {
    base_version: clock::Global,
    new_text: Arc<str>,
    changes: Vec<(ChangeTag, usize)>,
}

#[derive(Clone, Copy)]
struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    severity: DiagnosticSeverity,
}

373impl Buffer {
374 pub fn new<T: Into<Arc<str>>>(
375 replica_id: ReplicaId,
376 base_text: T,
377 cx: &mut ModelContext<Self>,
378 ) -> Self {
379 Self::build(
380 TextBuffer::new(
381 replica_id,
382 cx.model_id() as u64,
383 History::new(base_text.into()),
384 ),
385 None,
386 )
387 }
388
389 pub fn from_file<T: Into<Arc<str>>>(
390 replica_id: ReplicaId,
391 base_text: T,
392 file: Box<dyn File>,
393 cx: &mut ModelContext<Self>,
394 ) -> Self {
395 Self::build(
396 TextBuffer::new(
397 replica_id,
398 cx.model_id() as u64,
399 History::new(base_text.into()),
400 ),
401 Some(file),
402 )
403 }
404
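    /// Construct a buffer from the state serialized by `to_proto`, reapplying any
    /// deferred operations.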
405 pub fn from_proto(
406 replica_id: ReplicaId,
407 message: proto::BufferState,
408 file: Option<Box<dyn File>>,
409 cx: &mut ModelContext<Self>,
410 ) -> Result<Self> {
411 let fragments_len = message.fragments.len();
412 let buffer = TextBuffer::from_parts(
413 replica_id,
414 message.id,
415 &message.visible_text,
416 &message.deleted_text,
417 message
418 .undo_map
419 .into_iter()
420 .map(proto::deserialize_undo_map_entry),
421 message
422 .fragments
423 .into_iter()
424 .enumerate()
425 .map(|(i, fragment)| {
426 proto::deserialize_buffer_fragment(fragment, i, fragments_len)
427 }),
428 message.lamport_timestamp,
429 From::from(message.version),
430 );
431 let mut this = Self::build(buffer, file);
432 for selection_set in message.selections {
433 this.remote_selections.insert(
434 selection_set.replica_id as ReplicaId,
435 SelectionSet {
436 selections: proto::deserialize_selections(selection_set.selections),
437 lamport_timestamp: clock::Lamport {
438 replica_id: selection_set.replica_id as ReplicaId,
439 value: selection_set.lamport_timestamp,
440 },
441 },
442 );
443 }
444 let snapshot = this.snapshot();
445 let entries = proto::deserialize_diagnostics(message.diagnostics);
446 this.apply_diagnostic_update(
447 DiagnosticSet::from_sorted_entries(entries.into_iter().cloned(), &snapshot),
448 cx,
449 );
450
451 let deferred_ops = message
452 .deferred_operations
453 .into_iter()
454 .map(proto::deserialize_operation)
455 .collect::<Result<Vec<_>>>()?;
456 this.apply_ops(deferred_ops, cx)?;
457
458 Ok(this)
459 }
460
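    /// Serialize the buffer's full state (text, undo history, selections,
    /// diagnostics, and deferred operations) for transmission to another replica.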
461 pub fn to_proto(&self) -> proto::BufferState {
462 proto::BufferState {
463 id: self.remote_id(),
464 file: self.file.as_ref().map(|f| f.to_proto()),
465 visible_text: self.text.text(),
466 deleted_text: self.text.deleted_text(),
467 undo_map: self
468 .text
469 .undo_history()
470 .map(proto::serialize_undo_map_entry)
471 .collect(),
472 version: From::from(&self.version),
473 lamport_timestamp: self.lamport_clock.value,
474 fragments: self
475 .text
476 .fragments()
477 .map(proto::serialize_buffer_fragment)
478 .collect(),
479 selections: self
480 .remote_selections
481 .iter()
482 .map(|(replica_id, set)| proto::SelectionSet {
483 replica_id: *replica_id as u32,
484 selections: proto::serialize_selections(&set.selections),
485 lamport_timestamp: set.lamport_timestamp.value,
486 })
487 .collect(),
488 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
489 deferred_operations: self
490 .deferred_ops
491 .iter()
492 .map(proto::serialize_operation)
493 .chain(
494 self.text
495 .deferred_ops()
496 .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone()))),
497 )
498 .collect(),
499 }
500 }
501
502 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
503 self.set_language(Some(language), cx);
504 self
505 }
506
507 pub fn with_language_server(
508 mut self,
509 server: Arc<LanguageServer>,
510 cx: &mut ModelContext<Self>,
511 ) -> Self {
512 self.set_language_server(Some(server), cx);
513 self
514 }
515
516 fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
517 let saved_mtime;
518 if let Some(file) = file.as_ref() {
519 saved_mtime = file.mtime();
520 } else {
521 saved_mtime = UNIX_EPOCH;
522 }
523
524 Self {
525 saved_mtime,
526 saved_version: buffer.version(),
527 text: buffer,
528 file,
529 syntax_tree: Mutex::new(None),
530 parsing_in_background: false,
531 parse_count: 0,
532 sync_parse_timeout: Duration::from_millis(1),
533 autoindent_requests: Default::default(),
534 pending_autoindent: Default::default(),
535 language: None,
536 remote_selections: Default::default(),
537 selections_update_count: 0,
538 diagnostics: Default::default(),
539 diagnostics_update_count: 0,
540 language_server: None,
541 deferred_ops: OperationQueue::new(),
542 #[cfg(test)]
543 operations: Default::default(),
544 }
545 }
546
547 pub fn snapshot(&self) -> BufferSnapshot {
548 BufferSnapshot {
549 text: self.text.snapshot(),
550 tree: self.syntax_tree(),
551 remote_selections: self.remote_selections.clone(),
552 diagnostics: self.diagnostics.clone(),
553 diagnostics_update_count: self.diagnostics_update_count,
554 is_parsing: self.parsing_in_background,
555 language: self.language.clone(),
556 parse_count: self.parse_count,
557 selections_update_count: self.selections_update_count,
558 }
559 }
560
561 pub fn file(&self) -> Option<&dyn File> {
562 self.file.as_deref()
563 }
564
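    /// Format the buffer via the language server's `textDocument/formatting`
    /// request, or, when there is no local language server, by delegating to the
    /// file's `format_remote` implementation.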
565 pub fn format(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
566 let file = if let Some(file) = self.file.as_ref() {
567 file
568 } else {
569 return Task::ready(Err(anyhow!("buffer has no file")));
570 };
571
572 if let Some(LanguageServerState { server, .. }) = self.language_server.as_ref() {
573 let server = server.clone();
574 let abs_path = file.as_local().unwrap().abs_path(cx);
575 let version = self.version();
576 cx.spawn(|this, mut cx| async move {
577 let edits = server
578 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
579 text_document: lsp::TextDocumentIdentifier::new(
580 lsp::Url::from_file_path(&abs_path).unwrap(),
581 ),
582 options: Default::default(),
583 work_done_progress_params: Default::default(),
584 })
585 .await?;
586
587 if let Some(edits) = edits {
588 this.update(&mut cx, |this, cx| {
589 if this.version == version {
590 this.apply_lsp_edits(edits, cx)
591 } else {
592 Err(anyhow!("buffer edited since starting to format"))
593 }
594 })
595 } else {
596 Ok(())
597 }
598 })
599 } else {
600 let format = file.format_remote(self.remote_id(), cx.as_mut());
601 cx.spawn(|_, _| async move {
602 if let Some(format) = format {
603 format.await?;
604 }
605 Ok(())
606 })
607 }
608 }
609
610 pub fn save(
611 &mut self,
612 cx: &mut ModelContext<Self>,
613 ) -> Task<Result<(clock::Global, SystemTime)>> {
614 let file = if let Some(file) = self.file.as_ref() {
615 file
616 } else {
617 return Task::ready(Err(anyhow!("buffer has no file")));
618 };
619 let text = self.as_rope().clone();
620 let version = self.version();
621 let save = file.save(self.remote_id(), text, version, cx.as_mut());
622 cx.spawn(|this, mut cx| async move {
623 let (version, mtime) = save.await?;
624 this.update(&mut cx, |this, cx| {
625 this.did_save(version.clone(), mtime, None, cx);
626 });
627 Ok((version, mtime))
628 })
629 }
630
631 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
632 self.language = language;
633 self.reparse(cx);
634 }
635
636 pub fn set_language_server(
637 &mut self,
638 language_server: Option<Arc<lsp::LanguageServer>>,
639 cx: &mut ModelContext<Self>,
640 ) {
641 self.language_server = if let Some(server) = language_server {
642 let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
643 Some(LanguageServerState {
644 latest_snapshot: latest_snapshot_tx,
645 pending_snapshots: Default::default(),
646 next_version: 0,
647 server: server.clone(),
648 _maintain_server: cx.background().spawn(
649 async move {
650 let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
651 while let Some(snapshot) = latest_snapshot_rx.recv().await {
652 if let Some(snapshot) = snapshot {
653 let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
654 if let Some(prev_snapshot) = prev_snapshot {
655 let changes = lsp::DidChangeTextDocumentParams {
656 text_document: lsp::VersionedTextDocumentIdentifier::new(
657 uri,
658 snapshot.version as i32,
659 ),
660 content_changes: snapshot
661 .buffer_snapshot
662 .edits_since::<(PointUtf16, usize)>(
663 prev_snapshot.buffer_snapshot.version(),
664 )
665 .map(|edit| {
666 let edit_start = edit.new.start.0;
667 let edit_end = edit_start
668 + (edit.old.end.0 - edit.old.start.0);
669 let new_text = snapshot
670 .buffer_snapshot
671 .text_for_range(
672 edit.new.start.1..edit.new.end.1,
673 )
674 .collect();
675 lsp::TextDocumentContentChangeEvent {
676 range: Some(lsp::Range::new(
677 edit_start.to_lsp_position(),
678 edit_end.to_lsp_position(),
679 )),
680 range_length: None,
681 text: new_text,
682 }
683 })
684 .collect(),
685 };
686 server
687 .notify::<lsp::notification::DidChangeTextDocument>(changes)
688 .await?;
689 } else {
690 server
691 .notify::<lsp::notification::DidOpenTextDocument>(
692 lsp::DidOpenTextDocumentParams {
693 text_document: lsp::TextDocumentItem::new(
694 uri,
695 Default::default(),
696 snapshot.version as i32,
697 snapshot.buffer_snapshot.text().to_string(),
698 ),
699 },
700 )
701 .await?;
702 }
703
704 prev_snapshot = Some(snapshot);
705 }
706 }
707 Ok(())
708 }
709 .log_err(),
710 ),
711 })
712 } else {
713 None
714 };
715
716 self.update_language_server(cx);
717 }
718
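    /// Record a completed save: update the saved version and mtime, optionally swap
    /// in a new file handle, notify the language server, and emit `Event::Saved`.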
719 pub fn did_save(
720 &mut self,
721 version: clock::Global,
722 mtime: SystemTime,
723 new_file: Option<Box<dyn File>>,
724 cx: &mut ModelContext<Self>,
725 ) {
726 self.saved_mtime = mtime;
727 self.saved_version = version;
728 if let Some(new_file) = new_file {
729 self.file = Some(new_file);
730 }
731 if let Some((state, local_file)) = &self
732 .language_server
733 .as_ref()
734 .zip(self.file.as_ref().and_then(|f| f.as_local()))
735 {
736 cx.background()
737 .spawn(
738 state
739 .server
740 .notify::<lsp::notification::DidSaveTextDocument>(
741 lsp::DidSaveTextDocumentParams {
742 text_document: lsp::TextDocumentIdentifier {
743 uri: lsp::Url::from_file_path(local_file.abs_path(cx)).unwrap(),
744 },
745 text: None,
746 },
747 ),
748 )
749 .detach()
750 }
751 cx.emit(Event::Saved);
752 }
753
754 pub fn did_reload(
755 &mut self,
756 version: clock::Global,
757 mtime: SystemTime,
758 cx: &mut ModelContext<Self>,
759 ) {
760 self.saved_mtime = mtime;
761 self.saved_version = version;
762 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
763 file.buffer_reloaded(self.remote_id(), &self.saved_version, self.saved_mtime, cx);
764 }
765 cx.emit(Event::Reloaded);
766 cx.notify();
767 }
768
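    /// Respond to a change in the buffer's underlying file. If the file changed on
    /// disk and the buffer has no unsaved edits, reload its contents by diffing
    /// against the new text.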
769 pub fn file_updated(
770 &mut self,
771 new_file: Box<dyn File>,
772 cx: &mut ModelContext<Self>,
773 ) -> Task<()> {
774 let old_file = if let Some(file) = self.file.as_ref() {
775 file
776 } else {
777 return Task::ready(());
778 };
779 let mut file_changed = false;
780 let mut task = Task::ready(());
781
782 if new_file.path() != old_file.path() {
783 file_changed = true;
784 }
785
786 if new_file.is_deleted() {
787 if !old_file.is_deleted() {
788 file_changed = true;
789 if !self.is_dirty() {
790 cx.emit(Event::Dirtied);
791 }
792 }
793 } else {
794 let new_mtime = new_file.mtime();
795 if new_mtime != old_file.mtime() {
796 file_changed = true;
797
798 if !self.is_dirty() {
799 task = cx.spawn(|this, mut cx| {
800 async move {
801 let new_text = this.read_with(&cx, |this, cx| {
802 this.file
803 .as_ref()
804 .and_then(|file| file.as_local().map(|f| f.load(cx)))
805 });
806 if let Some(new_text) = new_text {
807 let new_text = new_text.await?;
808 let diff = this
809 .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
810 .await;
811 this.update(&mut cx, |this, cx| {
812 if this.apply_diff(diff, cx) {
813 this.did_reload(this.version(), new_mtime, cx);
814 }
815 });
816 }
817 Ok(())
818 }
819 .log_err()
820 .map(drop)
821 });
822 }
823 }
824 }
825
826 if file_changed {
827 cx.emit(Event::FileHandleChanged);
828 }
829 self.file = Some(new_file);
830 task
831 }
832
833 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
834 cx.emit(Event::Closed);
835 }
836
837 pub fn language(&self) -> Option<&Arc<Language>> {
838 self.language.as_ref()
839 }
840
841 pub fn language_server(&self) -> Option<&Arc<LanguageServer>> {
842 self.language_server.as_ref().map(|state| &state.server)
843 }
844
845 pub fn parse_count(&self) -> usize {
846 self.parse_count
847 }
848
849 pub fn selections_update_count(&self) -> usize {
850 self.selections_update_count
851 }
852
853 pub fn diagnostics_update_count(&self) -> usize {
854 self.diagnostics_update_count
855 }
856
857 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
858 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
859 self.interpolate_tree(syntax_tree);
860 Some(syntax_tree.tree.clone())
861 } else {
862 None
863 }
864 }
865
866 #[cfg(any(test, feature = "test-support"))]
867 pub fn is_parsing(&self) -> bool {
868 self.parsing_in_background
869 }
870
871 #[cfg(test)]
872 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
873 self.sync_parse_timeout = timeout;
874 }
875
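    /// Reparse the buffer with tree-sitter, synchronously if parsing completes
    /// within `sync_parse_timeout` and otherwise on a background task. Returns true
    /// if a new syntax tree was produced synchronously.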
    fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
        if self.parsing_in_background {
            return false;
        }

        if let Some(grammar) = self.grammar().cloned() {
            let old_tree = self.syntax_tree();
            let text = self.as_rope().clone();
            let parsed_version = self.version();
            let parse_task = cx.background().spawn({
                let grammar = grammar.clone();
                async move { Self::parse_text(&text, old_tree, &grammar) }
            });

            match cx
                .background()
                .block_with_timeout(self.sync_parse_timeout, parse_task)
            {
                Ok(new_tree) => {
                    self.did_finish_parsing(new_tree, parsed_version, cx);
                    return true;
                }
                Err(parse_task) => {
                    self.parsing_in_background = true;
                    cx.spawn(move |this, mut cx| async move {
                        let new_tree = parse_task.await;
                        this.update(&mut cx, move |this, cx| {
                            let grammar_changed = this
                                .grammar()
                                .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
                            let parse_again =
                                this.version.changed_since(&parsed_version) || grammar_changed;
                            this.parsing_in_background = false;
                            this.did_finish_parsing(new_tree, parsed_version, cx);

                            if parse_again && this.reparse(cx) {
                                return;
                            }
                        });
                    })
                    .detach();
                }
            }
        }
        false
    }

    fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
        PARSER.with(|parser| {
            let mut parser = parser.borrow_mut();
            parser
                .set_language(grammar.ts_language)
                .expect("incompatible grammar");
            let mut chunks = text.chunks_in_range(0..text.len());
            let tree = parser
                .parse_with(
                    &mut move |offset, _| {
                        chunks.seek(offset);
                        chunks.next().unwrap_or("").as_bytes()
                    },
                    old_tree.as_ref(),
                )
                .unwrap();
            tree
        })
    }

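    /// Apply the edits made since `tree.version` to the old syntax tree, keeping it
    /// roughly in sync with the current text until the next reparse completes.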
    fn interpolate_tree(&self, tree: &mut SyntaxTree) {
        for edit in self.edits_since::<(usize, Point)>(&tree.version) {
            let (bytes, lines) = edit.flatten();
            tree.tree.edit(&InputEdit {
                start_byte: bytes.new.start,
                old_end_byte: bytes.new.start + bytes.old.len(),
                new_end_byte: bytes.new.end,
                start_position: lines.new.start.to_ts_point(),
                old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
                    .to_ts_point(),
                new_end_position: lines.new.end.to_ts_point(),
            });
        }
        tree.version = self.version();
    }

959 fn did_finish_parsing(
960 &mut self,
961 tree: Tree,
962 version: clock::Global,
963 cx: &mut ModelContext<Self>,
964 ) {
965 self.parse_count += 1;
966 *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
967 self.request_autoindent(cx);
968 cx.emit(Event::Reparsed);
969 cx.notify();
970 }
971
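    /// Replace the buffer's diagnostics. Disk-based diagnostics are translated
    /// through any edits made since the last save, all ranges are clipped to the
    /// current contents, and the resulting set is broadcast to other replicas.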
972 pub fn update_diagnostics<T>(
973 &mut self,
974 version: Option<i32>,
975 mut diagnostics: Vec<DiagnosticEntry<T>>,
976 cx: &mut ModelContext<Self>,
977 ) -> Result<()>
978 where
979 T: Copy + Ord + TextDimension + Sub<Output = T> + Clip + ToPoint,
980 {
981 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
982 Ordering::Equal
983 .then_with(|| b.is_primary.cmp(&a.is_primary))
984 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
985 .then_with(|| a.severity.cmp(&b.severity))
986 .then_with(|| a.message.cmp(&b.message))
987 }
988
989 let version = version.map(|version| version as usize);
990 let content =
991 if let Some((version, language_server)) = version.zip(self.language_server.as_mut()) {
992 language_server
993 .pending_snapshots
994 .retain(|&v, _| v >= version);
995 let snapshot = language_server
996 .pending_snapshots
997 .get(&version)
998 .ok_or_else(|| anyhow!("missing snapshot"))?;
999 &snapshot.buffer_snapshot
1000 } else {
1001 self.deref()
1002 };
1003
1004 diagnostics.sort_unstable_by(|a, b| {
1005 Ordering::Equal
1006 .then_with(|| a.range.start.cmp(&b.range.start))
1007 .then_with(|| b.range.end.cmp(&a.range.end))
1008 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1009 });
1010
1011 let mut sanitized_diagnostics = Vec::new();
1012 let mut edits_since_save = content.edits_since::<T>(&self.saved_version).peekable();
1013 let mut last_edit_old_end = T::default();
1014 let mut last_edit_new_end = T::default();
1015 'outer: for entry in diagnostics {
1016 let mut start = entry.range.start;
1017 let mut end = entry.range.end;
1018
1019 // Some diagnostics are based on files on disk instead of buffers'
1020 // current contents. Adjust these diagnostics' ranges to reflect
1021 // any unsaved edits.
1022 if entry.diagnostic.is_disk_based {
1023 while let Some(edit) = edits_since_save.peek() {
1024 if edit.old.end <= start {
1025 last_edit_old_end = edit.old.end;
1026 last_edit_new_end = edit.new.end;
1027 edits_since_save.next();
1028 } else if edit.old.start <= end && edit.old.end >= start {
1029 continue 'outer;
1030 } else {
1031 break;
1032 }
1033 }
1034
1035 let start_overshoot = start - last_edit_old_end;
1036 start = last_edit_new_end;
1037 start.add_assign(&start_overshoot);
1038
1039 let end_overshoot = end - last_edit_old_end;
1040 end = last_edit_new_end;
1041 end.add_assign(&end_overshoot);
1042 }
1043
1044 let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content);
1045 let mut range = range.start.to_point(content)..range.end.to_point(content);
1046 // Expand empty ranges by one character
1047 if range.start == range.end {
1048 range.end.column += 1;
1049 range.end = content.clip_point(range.end, Bias::Right);
1050 if range.start == range.end && range.end.column > 0 {
1051 range.start.column -= 1;
1052 range.start = content.clip_point(range.start, Bias::Left);
1053 }
1054 }
1055
1056 sanitized_diagnostics.push(DiagnosticEntry {
1057 range,
1058 diagnostic: entry.diagnostic,
1059 });
1060 }
1061 drop(edits_since_save);
1062
1063 let set = DiagnosticSet::new(sanitized_diagnostics, content);
1064 self.apply_diagnostic_update(set.clone(), cx);
1065
1066 let op = Operation::UpdateDiagnostics {
1067 diagnostics: set.iter().cloned().collect(),
1068 lamport_timestamp: self.text.lamport_clock.tick(),
1069 };
1070 self.send_operation(op, cx);
1071 Ok(())
1072 }
1073
1074 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1075 if let Some(indent_columns) = self.compute_autoindents() {
1076 let indent_columns = cx.background().spawn(indent_columns);
1077 match cx
1078 .background()
1079 .block_with_timeout(Duration::from_micros(500), indent_columns)
1080 {
1081 Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
1082 Err(indent_columns) => {
1083 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1084 let indent_columns = indent_columns.await;
1085 this.update(&mut cx, |this, cx| {
1086 this.apply_autoindents(indent_columns, cx);
1087 });
1088 }));
1089 }
1090 }
1091 }
1092 }
1093
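    /// Build a future that computes a target indent column for every row affected
    /// by the pending autoindent requests, yielding to the executor between chunks
    /// of rows.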
1094 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
1095 let max_rows_between_yields = 100;
1096 let snapshot = self.snapshot();
1097 if snapshot.language.is_none()
1098 || snapshot.tree.is_none()
1099 || self.autoindent_requests.is_empty()
1100 {
1101 return None;
1102 }
1103
1104 let autoindent_requests = self.autoindent_requests.clone();
1105 Some(async move {
1106 let mut indent_columns = BTreeMap::new();
1107 for request in autoindent_requests {
1108 let old_to_new_rows = request
1109 .edited
1110 .iter()
1111 .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
1112 .zip(
1113 request
1114 .edited
1115 .iter()
1116 .map(|anchor| anchor.summary::<Point>(&snapshot).row),
1117 )
1118 .collect::<BTreeMap<u32, u32>>();
1119
1120 let mut old_suggestions = HashMap::<u32, u32>::default();
1121 let old_edited_ranges =
1122 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1123 for old_edited_range in old_edited_ranges {
1124 let suggestions = request
1125 .before_edit
1126 .suggest_autoindents(old_edited_range.clone())
1127 .into_iter()
1128 .flatten();
1129 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1130 let indentation_basis = old_to_new_rows
1131 .get(&suggestion.basis_row)
1132 .and_then(|from_row| old_suggestions.get(from_row).copied())
1133 .unwrap_or_else(|| {
1134 request
1135 .before_edit
1136 .indent_column_for_line(suggestion.basis_row)
1137 });
1138 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1139 old_suggestions.insert(
1140 *old_to_new_rows.get(&old_row).unwrap(),
1141 indentation_basis + delta,
1142 );
1143 }
1144 yield_now().await;
1145 }
1146
1147 // At this point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the
1148 // buffer before the edit, but keyed by the row for these lines after the edits were applied.
1149 let new_edited_row_ranges =
1150 contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
1151 for new_edited_row_range in new_edited_row_ranges {
1152 let suggestions = snapshot
1153 .suggest_autoindents(new_edited_row_range.clone())
1154 .into_iter()
1155 .flatten();
1156 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1157 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1158 let new_indentation = indent_columns
1159 .get(&suggestion.basis_row)
1160 .copied()
1161 .unwrap_or_else(|| {
1162 snapshot.indent_column_for_line(suggestion.basis_row)
1163 })
1164 + delta;
1165 if old_suggestions
1166 .get(&new_row)
1167 .map_or(true, |old_indentation| new_indentation != *old_indentation)
1168 {
1169 indent_columns.insert(new_row, new_indentation);
1170 }
1171 }
1172 yield_now().await;
1173 }
1174
1175 if let Some(inserted) = request.inserted.as_ref() {
1176 let inserted_row_ranges = contiguous_ranges(
1177 inserted
1178 .iter()
1179 .map(|range| range.to_point(&snapshot))
1180 .flat_map(|range| range.start.row..range.end.row + 1),
1181 max_rows_between_yields,
1182 );
1183 for inserted_row_range in inserted_row_ranges {
1184 let suggestions = snapshot
1185 .suggest_autoindents(inserted_row_range.clone())
1186 .into_iter()
1187 .flatten();
1188 for (row, suggestion) in inserted_row_range.zip(suggestions) {
1189 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1190 let new_indentation = indent_columns
1191 .get(&suggestion.basis_row)
1192 .copied()
1193 .unwrap_or_else(|| {
1194 snapshot.indent_column_for_line(suggestion.basis_row)
1195 })
1196 + delta;
1197 indent_columns.insert(row, new_indentation);
1198 }
1199 yield_now().await;
1200 }
1201 }
1202 }
1203 indent_columns
1204 })
1205 }
1206
1207 fn apply_autoindents(
1208 &mut self,
1209 indent_columns: BTreeMap<u32, u32>,
1210 cx: &mut ModelContext<Self>,
1211 ) {
1212 self.autoindent_requests.clear();
1213 self.start_transaction();
1214 for (row, indent_column) in &indent_columns {
1215 self.set_indent_column_for_line(*row, *indent_column, cx);
1216 }
1217 self.end_transaction(cx);
1218 }
1219
1220 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
1221 let current_column = self.indent_column_for_line(row);
1222 if column > current_column {
1223 let offset = Point::new(row, 0).to_offset(&*self);
1224 self.edit(
1225 [offset..offset],
1226 " ".repeat((column - current_column) as usize),
1227 cx,
1228 );
1229 } else if column < current_column {
1230 self.edit(
1231 [Point::new(row, 0)..Point::new(row, current_column - column)],
1232 "",
1233 cx,
1234 );
1235 }
1236 }
1237
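    /// Compute a line-based diff between the buffer's current text and `new_text`
    /// on a background thread.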
1238 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
1239 // TODO: it would be nice to not allocate here.
1240 let old_text = self.text();
1241 let base_version = self.version();
1242 cx.background().spawn(async move {
1243 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
1244 .iter_all_changes()
1245 .map(|c| (c.tag(), c.value().len()))
1246 .collect::<Vec<_>>();
1247 Diff {
1248 base_version,
1249 new_text,
1250 changes,
1251 }
1252 })
1253 }
1254
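    /// Apply a previously computed diff, but only if the buffer hasn't changed
    /// since the diff's base version.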
    pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
        if self.version == diff.base_version {
            self.start_transaction();
            let mut offset = 0;
            for (tag, len) in diff.changes {
                let range = offset..(offset + len);
                match tag {
                    ChangeTag::Equal => offset += len,
                    ChangeTag::Delete => self.edit(Some(range), "", cx),
                    ChangeTag::Insert => {
                        self.edit(Some(offset..offset), &diff.new_text[range], cx);
                        offset += len;
                    }
                }
            }
            self.end_transaction(cx);
            true
        } else {
            false
        }
    }

    pub fn is_dirty(&self) -> bool {
        !self.saved_version.observed_all(&self.version)
            || self.file.as_ref().map_or(false, |file| file.is_deleted())
    }

    pub fn has_conflict(&self) -> bool {
        !self.saved_version.observed_all(&self.version)
            && self
                .file
                .as_ref()
                .map_or(false, |file| file.mtime() > self.saved_mtime)
    }

1290 pub fn subscribe(&mut self) -> Subscription {
1291 self.text.subscribe()
1292 }
1293
1294 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1295 self.start_transaction_at(Instant::now())
1296 }
1297
1298 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1299 self.text.start_transaction_at(now)
1300 }
1301
1302 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1303 self.end_transaction_at(Instant::now(), cx)
1304 }
1305
1306 pub fn end_transaction_at(
1307 &mut self,
1308 now: Instant,
1309 cx: &mut ModelContext<Self>,
1310 ) -> Option<TransactionId> {
1311 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1312 let was_dirty = start_version != self.saved_version;
1313 self.did_edit(&start_version, was_dirty, cx);
1314 Some(transaction_id)
1315 } else {
1316 None
1317 }
1318 }
1319
1320 pub fn avoid_grouping_next_transaction(&mut self) {
1321 self.text.avoid_grouping_next_transaction();
1322 }
1323
1324 pub fn set_active_selections(
1325 &mut self,
1326 selections: Arc<[Selection<Anchor>]>,
1327 cx: &mut ModelContext<Self>,
1328 ) {
1329 let lamport_timestamp = self.text.lamport_clock.tick();
1330 self.remote_selections.insert(
1331 self.text.replica_id(),
1332 SelectionSet {
1333 selections: selections.clone(),
1334 lamport_timestamp,
1335 },
1336 );
1337 self.send_operation(
1338 Operation::UpdateSelections {
1339 replica_id: self.text.replica_id(),
1340 selections,
1341 lamport_timestamp,
1342 },
1343 cx,
1344 );
1345 }
1346
1347 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1348 self.set_active_selections(Arc::from([]), cx);
1349 }
1350
1351 fn update_language_server(&mut self, cx: &AppContext) {
1352 let language_server = if let Some(language_server) = self.language_server.as_mut() {
1353 language_server
1354 } else {
1355 return;
1356 };
1357 let abs_path = self
1358 .file
1359 .as_ref()
1360 .and_then(|f| f.as_local())
1361 .map_or(Path::new("/").to_path_buf(), |file| file.abs_path(cx));
1362
1363 let version = post_inc(&mut language_server.next_version);
1364 let snapshot = LanguageServerSnapshot {
1365 buffer_snapshot: self.text.snapshot(),
1366 version,
1367 path: Arc::from(abs_path),
1368 };
1369 language_server
1370 .pending_snapshots
1371 .insert(version, snapshot.clone());
1372 let _ = language_server
1373 .latest_snapshot
1374 .blocking_send(Some(snapshot));
1375 }
1376
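    /// Replace each of the given ranges with `new_text`, without requesting
    /// autoindent.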
1377 pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
1378 where
1379 I: IntoIterator<Item = Range<S>>,
1380 S: ToOffset,
1381 T: Into<String>,
1382 {
1383 self.edit_internal(ranges_iter, new_text, false, cx)
1384 }
1385
1386 pub fn edit_with_autoindent<I, S, T>(
1387 &mut self,
1388 ranges_iter: I,
1389 new_text: T,
1390 cx: &mut ModelContext<Self>,
1391 ) where
1392 I: IntoIterator<Item = Range<S>>,
1393 S: ToOffset,
1394 T: Into<String>,
1395 {
1396 self.edit_internal(ranges_iter, new_text, true, cx)
1397 }
1398
1399 pub fn edit_internal<I, S, T>(
1400 &mut self,
1401 ranges_iter: I,
1402 new_text: T,
1403 autoindent: bool,
1404 cx: &mut ModelContext<Self>,
1405 ) where
1406 I: IntoIterator<Item = Range<S>>,
1407 S: ToOffset,
1408 T: Into<String>,
1409 {
1410 let new_text = new_text.into();
1411
1412 // Skip invalid ranges and coalesce contiguous ones.
1413 let mut ranges: Vec<Range<usize>> = Vec::new();
1414 for range in ranges_iter {
1415 let range = range.start.to_offset(self)..range.end.to_offset(self);
1416 if !new_text.is_empty() || !range.is_empty() {
1417 if let Some(prev_range) = ranges.last_mut() {
1418 if prev_range.end >= range.start {
1419 prev_range.end = cmp::max(prev_range.end, range.end);
1420 } else {
1421 ranges.push(range);
1422 }
1423 } else {
1424 ranges.push(range);
1425 }
1426 }
1427 }
1428 if ranges.is_empty() {
1429 return;
1430 }
1431
1432 self.start_transaction();
1433 self.pending_autoindent.take();
1434 let autoindent_request = if autoindent && self.language.is_some() {
1435 let before_edit = self.snapshot();
1436 let edited = ranges
1437 .iter()
1438 .filter_map(|range| {
1439 let start = range.start.to_point(self);
1440 if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
1441 None
1442 } else {
1443 Some(self.anchor_before(range.start))
1444 }
1445 })
1446 .collect();
1447 Some((before_edit, edited))
1448 } else {
1449 None
1450 };
1451
1452 let first_newline_ix = new_text.find('\n');
1453 let new_text_len = new_text.len();
1454
1455 let edit = self.text.edit(ranges.iter().cloned(), new_text);
1456
1457 if let Some((before_edit, edited)) = autoindent_request {
1458 let mut inserted = None;
1459 if let Some(first_newline_ix) = first_newline_ix {
1460 let mut delta = 0isize;
1461 inserted = Some(
1462 ranges
1463 .iter()
1464 .map(|range| {
1465 let start =
1466 (delta + range.start as isize) as usize + first_newline_ix + 1;
1467 let end = (delta + range.start as isize) as usize + new_text_len;
1468 delta +=
1469 (range.end as isize - range.start as isize) + new_text_len as isize;
1470 self.anchor_before(start)..self.anchor_after(end)
1471 })
1472 .collect(),
1473 );
1474 }
1475
1476 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1477 before_edit,
1478 edited,
1479 inserted,
1480 }));
1481 }
1482
1483 self.end_transaction(cx);
1484 self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
1485 }
1486
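    /// Apply text edits received from the language server, validating that every
    /// range lies within the buffer and applying the edits from back to front.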
    fn apply_lsp_edits(
        &mut self,
        edits: Vec<lsp::TextEdit>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        for edit in &edits {
            let range = range_from_lsp(edit.range);
            if self.clip_point_utf16(range.start, Bias::Left) != range.start
                || self.clip_point_utf16(range.end, Bias::Left) != range.end
            {
                return Err(anyhow!(
                    "invalid formatting edits received from language server"
                ));
            }
        }

        for edit in edits.into_iter().rev() {
            self.edit([range_from_lsp(edit.range)], edit.new_text, cx);
        }

        Ok(())
    }

1510 fn did_edit(
1511 &mut self,
1512 old_version: &clock::Global,
1513 was_dirty: bool,
1514 cx: &mut ModelContext<Self>,
1515 ) {
1516 if self.edits_since::<usize>(old_version).next().is_none() {
1517 return;
1518 }
1519
1520 self.reparse(cx);
1521 self.update_language_server(cx);
1522
1523 cx.emit(Event::Edited);
1524 if !was_dirty {
1525 cx.emit(Event::Dirtied);
1526 }
1527 cx.notify();
1528 }
1529
1530 fn grammar(&self) -> Option<&Arc<Grammar>> {
1531 self.language.as_ref().and_then(|l| l.grammar.as_ref())
1532 }
1533
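    /// Apply operations received from other replicas. Operations whose anchors
    /// can't yet be resolved are deferred and retried as more operations arrive.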
1534 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1535 &mut self,
1536 ops: I,
1537 cx: &mut ModelContext<Self>,
1538 ) -> Result<()> {
1539 self.pending_autoindent.take();
1540 let was_dirty = self.is_dirty();
1541 let old_version = self.version.clone();
1542 let mut deferred_ops = Vec::new();
1543 let buffer_ops = ops
1544 .into_iter()
1545 .filter_map(|op| match op {
1546 Operation::Buffer(op) => Some(op),
1547 _ => {
1548 if self.can_apply_op(&op) {
1549 self.apply_op(op, cx);
1550 } else {
1551 deferred_ops.push(op);
1552 }
1553 None
1554 }
1555 })
1556 .collect::<Vec<_>>();
1557 self.text.apply_ops(buffer_ops)?;
1558 self.deferred_ops.insert(deferred_ops);
1559 self.flush_deferred_ops(cx);
1560 self.did_edit(&old_version, was_dirty, cx);
1561 // Notify independently of whether the buffer was edited as the operations could include a
1562 // selection update.
1563 cx.notify();
1564 Ok(())
1565 }
1566
1567 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1568 let mut deferred_ops = Vec::new();
1569 for op in self.deferred_ops.drain().iter().cloned() {
1570 if self.can_apply_op(&op) {
1571 self.apply_op(op, cx);
1572 } else {
1573 deferred_ops.push(op);
1574 }
1575 }
1576 self.deferred_ops.insert(deferred_ops);
1577 }
1578
1579 fn can_apply_op(&self, operation: &Operation) -> bool {
1580 match operation {
1581 Operation::Buffer(_) => {
1582 unreachable!("buffer operations should never be applied at this layer")
1583 }
1584 Operation::UpdateDiagnostics {
1585 diagnostics: diagnostic_set,
1586 ..
1587 } => diagnostic_set.iter().all(|diagnostic| {
1588 self.text.can_resolve(&diagnostic.range.start)
1589 && self.text.can_resolve(&diagnostic.range.end)
1590 }),
1591 Operation::UpdateSelections { selections, .. } => selections
1592 .iter()
1593 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1594 }
1595 }
1596
1597 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1598 match operation {
1599 Operation::Buffer(_) => {
1600 unreachable!("buffer operations should never be applied at this layer")
1601 }
1602 Operation::UpdateDiagnostics {
1603 diagnostics: diagnostic_set,
1604 ..
1605 } => {
1606 let snapshot = self.snapshot();
1607 self.apply_diagnostic_update(
1608 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1609 cx,
1610 );
1611 }
1612 Operation::UpdateSelections {
1613 replica_id,
1614 selections,
1615 lamport_timestamp,
1616 } => {
1617 if let Some(set) = self.remote_selections.get(&replica_id) {
1618 if set.lamport_timestamp > lamport_timestamp {
1619 return;
1620 }
1621 }
1622
1623 self.remote_selections.insert(
1624 replica_id,
1625 SelectionSet {
1626 selections,
1627 lamport_timestamp,
1628 },
1629 );
1630 self.text.lamport_clock.observe(lamport_timestamp);
1631 self.selections_update_count += 1;
1632 }
1633 }
1634 }
1635
1636 fn apply_diagnostic_update(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext<Self>) {
1637 self.diagnostics = diagnostics;
1638 self.diagnostics_update_count += 1;
1639 cx.notify();
1640 cx.emit(Event::DiagnosticsUpdated);
1641 }
1642
1643 #[cfg(not(test))]
1644 pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1645 if let Some(file) = &self.file {
1646 file.buffer_updated(self.remote_id(), operation, cx.as_mut());
1647 }
1648 }
1649
1650 #[cfg(test)]
1651 pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
1652 self.operations.push(operation);
1653 }
1654
1655 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1656 self.remote_selections.remove(&replica_id);
1657 cx.notify();
1658 }
1659
1660 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1661 let was_dirty = self.is_dirty();
1662 let old_version = self.version.clone();
1663
1664 if let Some((transaction_id, operation)) = self.text.undo() {
1665 self.send_operation(Operation::Buffer(operation), cx);
1666 self.did_edit(&old_version, was_dirty, cx);
1667 Some(transaction_id)
1668 } else {
1669 None
1670 }
1671 }
1672
1673 pub fn undo_transaction(
1674 &mut self,
1675 transaction_id: TransactionId,
1676 cx: &mut ModelContext<Self>,
1677 ) -> bool {
1678 let was_dirty = self.is_dirty();
1679 let old_version = self.version.clone();
1680
1681 if let Some(operation) = self.text.undo_transaction(transaction_id) {
1682 self.send_operation(Operation::Buffer(operation), cx);
1683 self.did_edit(&old_version, was_dirty, cx);
1684 true
1685 } else {
1686 false
1687 }
1688 }
1689
1690 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1691 let was_dirty = self.is_dirty();
1692 let old_version = self.version.clone();
1693
1694 if let Some((transaction_id, operation)) = self.text.redo() {
1695 self.send_operation(Operation::Buffer(operation), cx);
1696 self.did_edit(&old_version, was_dirty, cx);
1697 Some(transaction_id)
1698 } else {
1699 None
1700 }
1701 }
1702
1703 pub fn redo_transaction(
1704 &mut self,
1705 transaction_id: TransactionId,
1706 cx: &mut ModelContext<Self>,
1707 ) -> bool {
1708 let was_dirty = self.is_dirty();
1709 let old_version = self.version.clone();
1710
1711 if let Some(operation) = self.text.redo_transaction(transaction_id) {
1712 self.send_operation(Operation::Buffer(operation), cx);
1713 self.did_edit(&old_version, was_dirty, cx);
1714 true
1715 } else {
1716 false
1717 }
1718 }
1719
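    /// Request completions at the given position, either from the local language
    /// server or, for remote buffers, via the host.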
1720 pub fn completions<T>(
1721 &self,
1722 position: T,
1723 cx: &mut ModelContext<Self>,
1724 ) -> Task<Result<Vec<Completion<Anchor>>>>
1725 where
1726 T: ToOffset,
1727 {
1728 let file = if let Some(file) = self.file.as_ref() {
1729 file
1730 } else {
1731 return Task::ready(Ok(Default::default()));
1732 };
1733
1734 if let Some(file) = file.as_local() {
1735 let server = if let Some(lang) = self.language_server.as_ref() {
1736 lang.server.clone()
1737 } else {
1738 return Task::ready(Ok(Default::default()));
1739 };
1740 let abs_path = file.abs_path(cx);
1741 let position = self.offset_to_point_utf16(position.to_offset(self));
1742
1743 cx.spawn(|this, cx| async move {
1744 let completions = server
1745 .request::<lsp::request::Completion>(lsp::CompletionParams {
1746 text_document_position: lsp::TextDocumentPositionParams::new(
1747 lsp::TextDocumentIdentifier::new(
1748 lsp::Url::from_file_path(abs_path).unwrap(),
1749 ),
1750 position.to_lsp_position(),
1751 ),
1752 context: Default::default(),
1753 work_done_progress_params: Default::default(),
1754 partial_result_params: Default::default(),
1755 })
1756 .await?;
1757
1758 let completions = if let Some(completions) = completions {
1759 match completions {
1760 lsp::CompletionResponse::Array(completions) => completions,
1761 lsp::CompletionResponse::List(list) => list.items,
1762 }
1763 } else {
1764 Default::default()
1765 };
1766
            this.read_with(&cx, |this, _| {
                Ok(completions.into_iter().filter_map(|lsp_completion| {
                    let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                        lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()),
                        lsp::CompletionTextEdit::InsertAndReplace(_) => {
                            log::info!("received an insert and replace completion but we don't yet support that");
                            return None
                        },
                    };

                    let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                    let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                    if clipped_start == old_range.start && clipped_end == old_range.end {
                        Some(Completion {
                            old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end),
                            new_text,
                            lsp_completion,
                        })
                    } else {
                        None
                    }
                }).collect())
            })
        })
1791 } else {
1792 file.completions(self.remote_id(), self.anchor_before(position), cx.as_mut())
1793 }
1794 }
1795
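    /// Resolve the given completion with the language server and apply any
    /// additional text edits that it returns.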
1796 pub fn apply_additional_edits_for_completion(
1797 &mut self,
1798 completion: Completion<Anchor>,
1799 cx: &mut ModelContext<Self>,
1800 ) -> Option<Task<Result<()>>> {
1801 self.file.as_ref()?.as_local()?;
1802 let server = self.language_server.as_ref()?.server.clone();
1803 Some(cx.spawn(|this, mut cx| async move {
1804 let resolved_completion = server
1805 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1806 .await?;
1807 if let Some(additional_edits) = resolved_completion.additional_text_edits {
1808 this.update(&mut cx, |this, cx| {
1809 this.apply_lsp_edits(additional_edits, cx)
1810 })?;
1811 }
1812 Ok::<_, anyhow::Error>(())
1813 }))
1814 }
1815}
1816
1817#[cfg(any(test, feature = "test-support"))]
1818impl Buffer {
1819 pub fn set_group_interval(&mut self, group_interval: Duration) {
1820 self.text.set_group_interval(group_interval);
1821 }
1822
1823 pub fn randomly_edit<T>(
1824 &mut self,
1825 rng: &mut T,
1826 old_range_count: usize,
1827 cx: &mut ModelContext<Self>,
1828 ) where
1829 T: rand::Rng,
1830 {
1831 let mut old_ranges: Vec<Range<usize>> = Vec::new();
1832 for _ in 0..old_range_count {
1833 let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1);
1834 if last_end > self.len() {
1835 break;
1836 }
1837 old_ranges.push(self.text.random_byte_range(last_end, rng));
1838 }
1839 let new_text_len = rng.gen_range(0..10);
1840 let new_text: String = crate::random_char_iter::RandomCharIter::new(&mut *rng)
1841 .take(new_text_len)
1842 .collect();
1843 log::info!(
1844 "mutating buffer {} at {:?}: {:?}",
1845 self.replica_id(),
1846 old_ranges,
1847 new_text
1848 );
1849 self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx);
1850 }
1851
1852 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1853 let was_dirty = self.is_dirty();
1854 let old_version = self.version.clone();
1855
1856 let ops = self.text.randomly_undo_redo(rng);
1857 if !ops.is_empty() {
1858 for op in ops {
1859 self.send_operation(Operation::Buffer(op), cx);
1860 self.did_edit(&old_version, was_dirty, cx);
1861 }
1862 }
1863 }
1864}
1865
1866impl Entity for Buffer {
1867 type Event = Event;
1868
1869 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
1870 if let Some(file) = self.file.as_ref() {
1871 file.buffer_removed(self.remote_id(), cx);
1872 }
1873 }
1874}
1875
1876impl Deref for Buffer {
1877 type Target = TextBuffer;
1878
1879 fn deref(&self) -> &Self::Target {
1880 &self.text
1881 }
1882}
1883
1884impl BufferSnapshot {
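    /// Use the language's indents query to produce an `IndentSuggestion` for each
    /// row in the given range: which earlier row to base its indentation on, and
    /// whether to indent one additional level beyond that row.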
1885 fn suggest_autoindents<'a>(
1886 &'a self,
1887 row_range: Range<u32>,
1888 ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
1889 let mut query_cursor = QueryCursorHandle::new();
1890 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1891 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1892
1893 // Get the "indentation ranges" that intersect this row range.
1894 let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
1895 let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
1896 query_cursor.set_point_range(
1897 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
1898 ..Point::new(row_range.end, 0).to_ts_point(),
1899 );
1900 let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
1901 for mat in query_cursor.matches(
1902 &grammar.indents_query,
1903 tree.root_node(),
1904 TextProvider(self.as_rope()),
1905 ) {
1906 let mut node_kind = "";
1907 let mut start: Option<Point> = None;
1908 let mut end: Option<Point> = None;
1909 for capture in mat.captures {
1910 if Some(capture.index) == indent_capture_ix {
1911 node_kind = capture.node.kind();
1912 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1913 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1914 } else if Some(capture.index) == end_capture_ix {
1915 end = Some(Point::from_ts_point(capture.node.start_position().into()));
1916 }
1917 }
1918
1919 if let Some((start, end)) = start.zip(end) {
1920 if start.row == end.row {
1921 continue;
1922 }
1923
1924 let range = start..end;
1925 match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
1926 Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
1927 Ok(ix) => {
1928 let prev_range = &mut indentation_ranges[ix];
1929 prev_range.0.end = prev_range.0.end.max(range.end);
1930 }
1931 }
1932 }
1933 }
1934
1935 let mut prev_row = prev_non_blank_row.unwrap_or(0);
1936 Some(row_range.map(move |row| {
1937 let row_start = Point::new(row, self.indent_column_for_line(row));
1938
1939 let mut indent_from_prev_row = false;
1940 let mut outdent_to_row = u32::MAX;
1941 for (range, _node_kind) in &indentation_ranges {
1942 if range.start.row >= row {
1943 break;
1944 }
1945
1946 if range.start.row == prev_row && range.end > row_start {
1947 indent_from_prev_row = true;
1948 }
1949 if range.end.row >= prev_row && range.end <= row_start {
1950 outdent_to_row = outdent_to_row.min(range.start.row);
1951 }
1952 }
1953
1954 let suggestion = if outdent_to_row == prev_row {
1955 IndentSuggestion {
1956 basis_row: prev_row,
1957 indent: false,
1958 }
1959 } else if indent_from_prev_row {
1960 IndentSuggestion {
1961 basis_row: prev_row,
1962 indent: true,
1963 }
1964 } else if outdent_to_row < prev_row {
1965 IndentSuggestion {
1966 basis_row: outdent_to_row,
1967 indent: false,
1968 }
1969 } else {
1970 IndentSuggestion {
1971 basis_row: prev_row,
1972 indent: false,
1973 }
1974 };
1975
1976 prev_row = row;
1977 suggestion
1978 }))
1979 } else {
1980 None
1981 }
1982 }
1983
1984 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
1985 while row > 0 {
1986 row -= 1;
1987 if !self.is_line_blank(row) {
1988 return Some(row);
1989 }
1990 }
1991 None
1992 }
1993
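    /// Iterate over the given range as a sequence of `Chunk`s. When a theme is
    /// provided, chunks are annotated with syntax highlight styles and diagnostic
    /// severities.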
1994 pub fn chunks<'a, T: ToOffset>(
1995 &'a self,
1996 range: Range<T>,
1997 theme: Option<&'a SyntaxTheme>,
1998 ) -> BufferChunks<'a> {
1999 let range = range.start.to_offset(self)..range.end.to_offset(self);
2000
2001 let mut highlights = None;
2002 let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
2003 if let Some(theme) = theme {
2004 for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
2005 diagnostic_endpoints.push(DiagnosticEndpoint {
2006 offset: entry.range.start,
2007 is_start: true,
2008 severity: entry.diagnostic.severity,
2009 });
2010 diagnostic_endpoints.push(DiagnosticEndpoint {
2011 offset: entry.range.end,
2012 is_start: false,
2013 severity: entry.diagnostic.severity,
2014 });
2015 }
2016 diagnostic_endpoints
2017 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2018
2019 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
2020 let mut query_cursor = QueryCursorHandle::new();
2021
2022 // TODO - add a Tree-sitter API to remove the need for this.
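// The transmute extends the cursor's borrow to 'static so that the captures
// iterator, which borrows the cursor, can be stored in `BufferChunkHighlights`
// alongside the handle that owns the cursor (`_query_cursor`), forming a
// self-referential struct that tree-sitter's API does not otherwise allow.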
2023 let cursor = unsafe {
2024 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
2025 };
2026 let captures = cursor.set_byte_range(range.clone()).captures(
2027 &grammar.highlights_query,
2028 tree.root_node(),
2029 TextProvider(self.text.as_rope()),
2030 );
2031 highlights = Some(BufferChunkHighlights {
2032 captures,
2033 next_capture: None,
2034 stack: Default::default(),
2035 highlight_map: grammar.highlight_map(),
2036 _query_cursor: query_cursor,
2037 theme,
2038 })
2039 }
2040 }
2041
2042 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2043 let chunks = self.text.as_rope().chunks_in_range(range.clone());
2044
2045 BufferChunks {
2046 range,
2047 chunks,
2048 diagnostic_endpoints,
2049 error_depth: 0,
2050 warning_depth: 0,
2051 information_depth: 0,
2052 hint_depth: 0,
2053 highlights,
2054 }
2055 }
2056
2057 pub fn language(&self) -> Option<&Arc<Language>> {
2058 self.language.as_ref()
2059 }
2060
2061 fn grammar(&self) -> Option<&Arc<Grammar>> {
2062 self.language
2063 .as_ref()
2064 .and_then(|language| language.grammar.as_ref())
2065 }
2066
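// Returns the byte range of the smallest syntax node that contains `range`
// without matching it exactly, walking up the tree past any ancestors whose
// range coincides with the input. Returns `None` when there is no parse tree
// or no such ancestor exists.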
2067 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2068 if let Some(tree) = self.tree.as_ref() {
2069 let root = tree.root_node();
2070 let range = range.start.to_offset(self)..range.end.to_offset(self);
2071 let mut node = root.descendant_for_byte_range(range.start, range.end);
2072 while node.map_or(false, |n| n.byte_range() == range) {
2073 node = node.unwrap().parent();
2074 }
2075 node.map(|n| n.byte_range())
2076 } else {
2077 None
2078 }
2079 }
2080
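// Builds an outline of the buffer (functions, types, etc.) by running the
// grammar's `outline_query` over the syntax tree. Each match contributes an
// item whose text is assembled from its `name` and `context` captures, with
// highlight ranges taken from the buffer's own syntax highlighting when a
// theme is given. Nesting depth is derived from how item ranges contain one
// another. Returns `None` if there is no parse tree or grammar, the query
// lacks the required `item`/`name` captures, or no items are produced.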
2081 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2082 let tree = self.tree.as_ref()?;
2083 let grammar = self
2084 .language
2085 .as_ref()
2086 .and_then(|language| language.grammar.as_ref())?;
2087
2088 let mut cursor = QueryCursorHandle::new();
2089 let matches = cursor.matches(
2090 &grammar.outline_query,
2091 tree.root_node(),
2092 TextProvider(self.as_rope()),
2093 );
2094
2095 let mut chunks = self.chunks(0..self.len(), theme);
2096
2097 let item_capture_ix = grammar.outline_query.capture_index_for_name("item")?;
2098 let name_capture_ix = grammar.outline_query.capture_index_for_name("name")?;
2099 let context_capture_ix = grammar
2100 .outline_query
2101 .capture_index_for_name("context")
2102 .unwrap_or(u32::MAX);
2103
2104 let mut stack = Vec::<Range<usize>>::new();
2105 let items = matches
2106 .filter_map(|mat| {
2107 let item_node = mat.nodes_for_capture_index(item_capture_ix).next()?;
2108 let range = item_node.start_byte()..item_node.end_byte();
2109 let mut text = String::new();
2110 let mut name_ranges = Vec::new();
2111 let mut highlight_ranges = Vec::new();
2112
2113 for capture in mat.captures {
2114 let node_is_name;
2115 if capture.index == name_capture_ix {
2116 node_is_name = true;
2117 } else if capture.index == context_capture_ix {
2118 node_is_name = false;
2119 } else {
2120 continue;
2121 }
2122
2123 let range = capture.node.start_byte()..capture.node.end_byte();
2124 if !text.is_empty() {
2125 text.push(' ');
2126 }
2127 if node_is_name {
2128 let mut start = text.len();
2129 let end = start + range.len();
2130
2131 // When multiple names are captured, the matchable text
2132 // includes the whitespace between them.
2133 if !name_ranges.is_empty() {
2134 start -= 1;
2135 }
2136
2137 name_ranges.push(start..end);
2138 }
2139
2140 let mut offset = range.start;
2141 chunks.seek(offset);
2142 while let Some(mut chunk) = chunks.next() {
2143 if chunk.text.len() > range.end - offset {
2144 chunk.text = &chunk.text[0..(range.end - offset)];
2145 offset = range.end;
2146 } else {
2147 offset += chunk.text.len();
2148 }
2149 if let Some(style) = chunk.highlight_style {
2150 let start = text.len();
2151 let end = start + chunk.text.len();
2152 highlight_ranges.push((start..end, style));
2153 }
2154 text.push_str(chunk.text);
2155 if offset >= range.end {
2156 break;
2157 }
2158 }
2159 }
2160
2161 while stack.last().map_or(false, |prev_range| {
2162 !prev_range.contains(&range.start) || !prev_range.contains(&range.end)
2163 }) {
2164 stack.pop();
2165 }
2166 stack.push(range.clone());
2167
2168 Some(OutlineItem {
2169 depth: stack.len() - 1,
2170 range: self.anchor_after(range.start)..self.anchor_before(range.end),
2171 text,
2172 highlight_ranges,
2173 name_ranges,
2174 })
2175 })
2176 .collect::<Vec<_>>();
2177
2178 if items.is_empty() {
2179 None
2180 } else {
2181 Some(Outline::new(items))
2182 }
2183 }
2184
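// Finds the innermost pair of brackets enclosing `range`, using the grammar's
// `brackets_query`. Returns the byte ranges of the opening and closing
// brackets, or `None` if the buffer has no grammar or parse tree, or no
// enclosing pair is found.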
2185 pub fn enclosing_bracket_ranges<T: ToOffset>(
2186 &self,
2187 range: Range<T>,
2188 ) -> Option<(Range<usize>, Range<usize>)> {
2189 let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
2190 let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
2191 let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
2192
2193 // Find bracket pairs that *inclusively* contain the given range.
2194 let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
2195 let mut cursor = QueryCursorHandle::new();
2196 let matches = cursor.set_byte_range(range).matches(
2197 &grammar.brackets_query,
2198 tree.root_node(),
2199 TextProvider(self.as_rope()),
2200 );
2201
2202 // Get the ranges of the innermost pair of brackets.
2203 matches
2204 .filter_map(|mat| {
2205 let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
2206 let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
2207 Some((open.byte_range(), close.byte_range()))
2208 })
2209 .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
2210 }
2211
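// Returns, for every remote replica with a non-empty selection set, an
// iterator over the selections that intersect `range`. The local replica's
// selections are skipped, and the bounds within each set are found by
// binary-searching its anchor-ordered selections.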
2218 pub fn remote_selections_in_range<'a>(
2219 &'a self,
2220 range: Range<Anchor>,
2221 ) -> impl 'a + Iterator<Item = (ReplicaId, impl 'a + Iterator<Item = &'a Selection<Anchor>>)>
2222 {
2223 self.remote_selections
2224 .iter()
2225 .filter(|(replica_id, set)| {
2226 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2227 })
2228 .map(move |(replica_id, set)| {
2229 let start_ix = match set.selections.binary_search_by(|probe| {
2230 probe
2231 .end
2232 .cmp(&range.start, self)
2233 .unwrap()
2234 .then(Ordering::Greater)
2235 }) {
2236 Ok(ix) | Err(ix) => ix,
2237 };
2238 let end_ix = match set.selections.binary_search_by(|probe| {
2239 probe
2240 .start
2241 .cmp(&range.end, self)
2242 .unwrap()
2243 .then(Ordering::Less)
2244 }) {
2245 Ok(ix) | Err(ix) => ix,
2246 };
2247
2248 (*replica_id, set.selections[start_ix..end_ix].iter())
2249 })
2250 }
2251
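// Returns the diagnostic entries whose ranges overlap `search_range`, with
// their anchors resolved into the caller's preferred coordinate type `O`,
// such as `usize` offsets.
//
// A minimal sketch (hypothetical `snapshot`, not defined in this file):
//
//     for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len()) {
//         eprintln!("{:?}: {}", entry.range, entry.diagnostic.message);
//     }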
2252 pub fn diagnostics_in_range<'a, T, O>(
2253 &'a self,
2254 search_range: Range<T>,
2255 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2256 where
2257 T: 'a + Clone + ToOffset,
2258 O: 'a + FromAnchor,
2259 {
2260 self.diagnostics.range(search_range.clone(), self, true)
2261 }
2262
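// Collects all diagnostic groups in the buffer. A group associates a primary
// diagnostic with its related entries and is identified by the `group_id`
// stored on each `Diagnostic`; `diagnostic_group` below fetches the entries of
// a single group.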
2263 pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
2264 let mut groups = Vec::new();
2265 self.diagnostics.groups(&mut groups, self);
2266 groups
2267 }
2268
2269 pub fn diagnostic_group<'a, O>(
2270 &'a self,
2271 group_id: usize,
2272 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2273 where
2274 O: 'a + FromAnchor,
2275 {
2276 self.diagnostics.group(group_id, self)
2277 }
2278
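// The counters below never decrease; each is bumped when the corresponding
// state (diagnostics, parse tree, remote selections) changes, so callers can
// compare values taken from different snapshots to cheaply detect updates.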
2279 pub fn diagnostics_update_count(&self) -> usize {
2280 self.diagnostics_update_count
2281 }
2282
2283 pub fn parse_count(&self) -> usize {
2284 self.parse_count
2285 }
2286
2287 pub fn selections_update_count(&self) -> usize {
2288 self.selections_update_count
2289 }
2290}
2291
2292impl Clone for BufferSnapshot {
2293 fn clone(&self) -> Self {
2294 Self {
2295 text: self.text.clone(),
2296 tree: self.tree.clone(),
2297 remote_selections: self.remote_selections.clone(),
2298 diagnostics: self.diagnostics.clone(),
2299 selections_update_count: self.selections_update_count,
2300 diagnostics_update_count: self.diagnostics_update_count,
2301 is_parsing: self.is_parsing,
2302 language: self.language.clone(),
2303 parse_count: self.parse_count,
2304 }
2305 }
2306}
2307
2308impl Deref for BufferSnapshot {
2309 type Target = text::BufferSnapshot;
2310
2311 fn deref(&self) -> &Self::Target {
2312 &self.text
2313 }
2314}
2315
2316impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
2317 type I = ByteChunks<'a>;
2318
2319 fn text(&mut self, node: tree_sitter::Node) -> Self::I {
2320 ByteChunks(self.0.chunks_in_range(node.byte_range()))
2321 }
2322}
2323
2324struct ByteChunks<'a>(rope::Chunks<'a>);
2325
2326impl<'a> Iterator for ByteChunks<'a> {
2327 type Item = &'a [u8];
2328
2329 fn next(&mut self) -> Option<Self::Item> {
2330 self.0.next().map(str::as_bytes)
2331 }
2332}
2333
2334unsafe impl<'a> Send for BufferChunks<'a> {}
2335
2336impl<'a> BufferChunks<'a> {
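// Repositions the iterator at `offset`: the rope chunks are re-seeked,
// highlight captures that have already ended are dropped from the stack, a
// pending capture is pushed if it still spans the new position, and the
// query's byte range is narrowed to the remainder of the chunk range.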
2337 pub fn seek(&mut self, offset: usize) {
2338 self.range.start = offset;
2339 self.chunks.seek(self.range.start);
2340 if let Some(highlights) = self.highlights.as_mut() {
2341 highlights
2342 .stack
2343 .retain(|(end_offset, _)| *end_offset > offset);
2344 if let Some((mat, capture_ix)) = &highlights.next_capture {
2345 let capture = mat.captures[*capture_ix as usize];
2346 if offset >= capture.node.start_byte() {
2347 let next_capture_end = capture.node.end_byte();
2348 if offset < next_capture_end {
2349 highlights.stack.push((
2350 next_capture_end,
2351 highlights.highlight_map.get(capture.index),
2352 ));
2353 }
2354 highlights.next_capture.take();
2355 }
2356 }
2357 highlights.captures.set_byte_range(self.range.clone());
2358 }
2359 }
2360
2361 pub fn offset(&self) -> usize {
2362 self.range.start
2363 }
2364
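// Diagnostics can overlap, so instead of a single flag the iterator keeps a
// nesting depth per severity: entering a diagnostic's range increments its
// depth and leaving it decrements it. `current_diagnostic_severity` then
// reports the most severe level whose depth is still positive.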
2365 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2366 let depth = match endpoint.severity {
2367 DiagnosticSeverity::ERROR => &mut self.error_depth,
2368 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2369 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2370 DiagnosticSeverity::HINT => &mut self.hint_depth,
2371 _ => return,
2372 };
2373 if endpoint.is_start {
2374 *depth += 1;
2375 } else {
2376 *depth -= 1;
2377 }
2378 }
2379
2380 fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
2381 if self.error_depth > 0 {
2382 Some(DiagnosticSeverity::ERROR)
2383 } else if self.warning_depth > 0 {
2384 Some(DiagnosticSeverity::WARNING)
2385 } else if self.information_depth > 0 {
2386 Some(DiagnosticSeverity::INFORMATION)
2387 } else if self.hint_depth > 0 {
2388 Some(DiagnosticSeverity::HINT)
2389 } else {
2390 None
2391 }
2392 }
2393}
2394
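// Each call to `next` emits the longest run of text that is uniform with
// respect to highlighting and diagnostics: the chunk ends at whichever comes
// first of the end of the current rope chunk, the start of the next highlight
// capture, the end of the innermost enclosing capture, or the next diagnostic
// endpoint.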
2395impl<'a> Iterator for BufferChunks<'a> {
2396 type Item = Chunk<'a>;
2397
2398 fn next(&mut self) -> Option<Self::Item> {
2399 let mut next_capture_start = usize::MAX;
2400 let mut next_diagnostic_endpoint = usize::MAX;
2401
2402 if let Some(highlights) = self.highlights.as_mut() {
2403 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2404 if *parent_capture_end <= self.range.start {
2405 highlights.stack.pop();
2406 } else {
2407 break;
2408 }
2409 }
2410
2411 if highlights.next_capture.is_none() {
2412 highlights.next_capture = highlights.captures.next();
2413 }
2414
2415 while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
2416 let capture = mat.captures[*capture_ix as usize];
2417 if self.range.start < capture.node.start_byte() {
2418 next_capture_start = capture.node.start_byte();
2419 break;
2420 } else {
2421 let highlight_id = highlights.highlight_map.get(capture.index);
2422 highlights
2423 .stack
2424 .push((capture.node.end_byte(), highlight_id));
2425 highlights.next_capture = highlights.captures.next();
2426 }
2427 }
2428 }
2429
2430 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
2431 if endpoint.offset <= self.range.start {
2432 self.update_diagnostic_depths(endpoint);
2433 self.diagnostic_endpoints.next();
2434 } else {
2435 next_diagnostic_endpoint = endpoint.offset;
2436 break;
2437 }
2438 }
2439
2440 if let Some(chunk) = self.chunks.peek() {
2441 let chunk_start = self.range.start;
2442 let mut chunk_end = (self.chunks.offset() + chunk.len())
2443 .min(next_capture_start)
2444 .min(next_diagnostic_endpoint);
2445 let mut highlight_style = None;
2446 if let Some(highlights) = self.highlights.as_ref() {
2447 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
2448 chunk_end = chunk_end.min(*parent_capture_end);
2449 highlight_style = parent_highlight_id.style(highlights.theme);
2450 }
2451 }
2452
2453 let slice =
2454 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2455 self.range.start = chunk_end;
2456 if self.range.start == self.chunks.offset() + chunk.len() {
2457 self.chunks.next().unwrap();
2458 }
2459
2460 Some(Chunk {
2461 text: slice,
2462 highlight_style,
2463 diagnostic: self.current_diagnostic_severity(),
2464 })
2465 } else {
2466 None
2467 }
2468 }
2469}
2470
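// Query cursors are pooled in the global `QUERY_CURSORS` list so they can be
// reused across queries: `new` takes a pooled cursor when one is available,
// and `Drop` resets the cursor's byte and point ranges before returning it to
// the pool.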
2471impl QueryCursorHandle {
2472 pub(crate) fn new() -> Self {
2473 QueryCursorHandle(Some(
2474 QUERY_CURSORS
2475 .lock()
2476 .pop()
2477 .unwrap_or_else(|| QueryCursor::new()),
2478 ))
2479 }
2480}
2481
2482impl Deref for QueryCursorHandle {
2483 type Target = QueryCursor;
2484
2485 fn deref(&self) -> &Self::Target {
2486 self.0.as_ref().unwrap()
2487 }
2488}
2489
2490impl DerefMut for QueryCursorHandle {
2491 fn deref_mut(&mut self) -> &mut Self::Target {
2492 self.0.as_mut().unwrap()
2493 }
2494}
2495
2496impl Drop for QueryCursorHandle {
2497 fn drop(&mut self) {
2498 let mut cursor = self.0.take().unwrap();
2499 cursor.set_byte_range(0..usize::MAX);
2500 cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
2501 QUERY_CURSORS.lock().push(cursor)
2502 }
2503}
2504
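// Converts between this crate's `Point` (u32 row/column) and tree-sitter's
// `Point` (usize row/column).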
2505trait ToTreeSitterPoint {
2506 fn to_ts_point(self) -> tree_sitter::Point;
2507 fn from_ts_point(point: tree_sitter::Point) -> Self;
2508}
2509
2510impl ToTreeSitterPoint for Point {
2511 fn to_ts_point(self) -> tree_sitter::Point {
2512 tree_sitter::Point::new(self.row as usize, self.column as usize)
2513 }
2514
2515 fn from_ts_point(point: tree_sitter::Point) -> Self {
2516 Point::new(point.row as u32, point.column as u32)
2517 }
2518}
2519
2520impl operation_queue::Operation for Operation {
2521 fn lamport_timestamp(&self) -> clock::Lamport {
2522 match self {
2523 Operation::Buffer(_) => {
2524 unreachable!("buffer operations should never be deferred at this layer")
2525 }
2526 Operation::UpdateDiagnostics {
2527 lamport_timestamp, ..
2528 }
2529 | Operation::UpdateSelections {
2530 lamport_timestamp, ..
2531 } => *lamport_timestamp,
2532 }
2533 }
2534}
2535
2536impl Default for Diagnostic {
2537 fn default() -> Self {
2538 Self {
2539 code: Default::default(),
2540 severity: DiagnosticSeverity::ERROR,
2541 message: Default::default(),
2542 group_id: Default::default(),
2543 is_primary: Default::default(),
2544 is_valid: true,
2545 is_disk_based: false,
2546 }
2547 }
2548}
2549
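// Helpers for presenting LSP completions: `filter_range` is the slice of the
// label that fuzzy matching should run against (the `filter_text` if it occurs
// in the label, otherwise the whole label), and `sort_key` ranks variables
// ahead of other completion kinds before falling back to the filtered text.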
2550impl<T> Completion<T> {
2551 pub fn label(&self) -> &str {
2552 &self.lsp_completion.label
2553 }
2554
2555 pub fn filter_range(&self) -> Range<usize> {
2556 if let Some(filter_text) = self.lsp_completion.filter_text.as_deref() {
2557 if let Some(start) = self.label().find(filter_text) {
2558 start..start + filter_text.len()
2559 } else {
2560 0..self.label().len()
2561 }
2562 } else {
2563 0..self.label().len()
2564 }
2565 }
2566
2567 pub fn sort_key(&self) -> (usize, &str) {
2568 let kind_key = match self.lsp_completion.kind {
2569 Some(lsp::CompletionItemKind::VARIABLE) => 0,
2570 _ => 1,
2571 };
2572 (kind_key, &self.label()[self.filter_range()])
2573 }
2574
2575 pub fn is_snippet(&self) -> bool {
2576 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
2577 }
2578}
2579
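// Coalesces an ascending sequence of row numbers into contiguous ranges, never
// letting a single range grow beyond `max_len` rows. For example, the values
// [1, 2, 3, 5, 6] with a `max_len` of 2 yield the ranges 1..3, 3..4, and 5..7.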
2580pub fn contiguous_ranges(
2581 values: impl IntoIterator<Item = u32>,
2582 max_len: usize,
2583) -> impl Iterator<Item = Range<u32>> {
2584 let mut values = values.into_iter();
2585 let mut current_range: Option<Range<u32>> = None;
2586 std::iter::from_fn(move || loop {
2587 if let Some(value) = values.next() {
2588 if let Some(range) = &mut current_range {
2589 if value == range.end && range.len() < max_len {
2590 range.end += 1;
2591 continue;
2592 }
2593 }
2594
2595 let prev_range = current_range.clone();
2596 current_range = Some(value..(value + 1));
2597 if prev_range.is_some() {
2598 return prev_range;
2599 }
2600 } else {
2601 return current_range.take();
2602 }
2603 })
2604}