1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
5 PLAIN_TEXT,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 outline::OutlineItem,
10 range_from_lsp, Outline, ToLspPosition,
11};
12use anyhow::{anyhow, Result};
13use clock::ReplicaId;
14use futures::FutureExt as _;
15use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
16use lazy_static::lazy_static;
17use lsp::LanguageServer;
18use parking_lot::Mutex;
19use postage::{prelude::Stream, sink::Sink, watch};
20use similar::{ChangeTag, TextDiff};
21use smol::future::yield_now;
22use std::{
23 any::Any,
24 cell::RefCell,
25 cmp::{self, Ordering},
26 collections::{BTreeMap, HashMap},
27 ffi::OsString,
28 future::Future,
29 iter::{Iterator, Peekable},
30 ops::{Deref, DerefMut, Range, Sub},
31 path::{Path, PathBuf},
32 str,
33 sync::Arc,
34 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
35 vec,
36};
37use sum_tree::TreeMap;
38use text::{operation_queue::OperationQueue, rope::TextDimension};
39pub use text::{Buffer as TextBuffer, Operation as _, *};
40use theme::SyntaxTheme;
41use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
42use util::{post_inc, TryFutureExt as _};
43
44#[cfg(any(test, feature = "test-support"))]
45pub use tree_sitter_rust;
46
47pub use lsp::DiagnosticSeverity;
48
49thread_local! {
50 static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
51}
52
53lazy_static! {
54 static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
55}
56
57// TODO - Make this configurable
58const INDENT_SIZE: u32 = 4;
59
60pub struct Buffer {
61 text: TextBuffer,
62 file: Option<Box<dyn File>>,
63 saved_version: clock::Global,
64 saved_mtime: SystemTime,
65 language: Option<Arc<Language>>,
66 autoindent_requests: Vec<Arc<AutoindentRequest>>,
67 pending_autoindent: Option<Task<()>>,
68 sync_parse_timeout: Duration,
69 syntax_tree: Mutex<Option<SyntaxTree>>,
70 parsing_in_background: bool,
71 parse_count: usize,
72 diagnostics: DiagnosticSet,
73 remote_selections: TreeMap<ReplicaId, SelectionSet>,
74 selections_update_count: usize,
75 diagnostics_update_count: usize,
76 language_server: Option<LanguageServerState>,
77 completion_triggers: Vec<String>,
78 deferred_ops: OperationQueue<Operation>,
79 #[cfg(test)]
80 pub(crate) operations: Vec<Operation>,
81}
82
83pub struct BufferSnapshot {
84 text: text::BufferSnapshot,
85 tree: Option<Tree>,
86 diagnostics: DiagnosticSet,
87 diagnostics_update_count: usize,
88 remote_selections: TreeMap<ReplicaId, SelectionSet>,
89 selections_update_count: usize,
90 is_parsing: bool,
91 language: Option<Arc<Language>>,
92 parse_count: usize,
93}
94
95#[derive(Clone, Debug)]
96struct SelectionSet {
97 selections: Arc<[Selection<Anchor>]>,
98 lamport_timestamp: clock::Lamport,
99}
100
101#[derive(Clone, Debug, PartialEq, Eq)]
102pub struct GroupId {
103 source: Arc<str>,
104 id: usize,
105}
106
107#[derive(Clone, Debug, PartialEq, Eq)]
108pub struct Diagnostic {
109 pub code: Option<String>,
110 pub severity: DiagnosticSeverity,
111 pub message: String,
112 pub group_id: usize,
113 pub is_valid: bool,
114 pub is_primary: bool,
115 pub is_disk_based: bool,
116}
117
118#[derive(Clone, Debug)]
119pub struct Completion<T> {
120 pub old_range: Range<T>,
121 pub new_text: String,
122 pub lsp_completion: lsp::CompletionItem,
123}
124
125struct LanguageServerState {
126 server: Arc<LanguageServer>,
127 latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
128 pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
129 next_version: usize,
130 _maintain_server: Task<()>,
131}
132
133#[derive(Clone)]
134struct LanguageServerSnapshot {
135 buffer_snapshot: text::BufferSnapshot,
136 version: usize,
137 path: Arc<Path>,
138}
139
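/// An operation used to keep this buffer in sync with its remote replicas.
/// The `Buffer` variant wraps the underlying text operations; the remaining
/// variants replicate non-text state such as diagnostics, selections, and
/// completion triggers.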
140#[derive(Clone, Debug)]
141pub enum Operation {
142 Buffer(text::Operation),
143 UpdateDiagnostics {
144 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
145 lamport_timestamp: clock::Lamport,
146 },
147 UpdateSelections {
148 replica_id: ReplicaId,
149 selections: Arc<[Selection<Anchor>]>,
150 lamport_timestamp: clock::Lamport,
151 },
152 UpdateCompletionTriggers {
153 triggers: Vec<String>,
154 },
155}
156
157#[derive(Clone, Debug, Eq, PartialEq)]
158pub enum Event {
159 Edited,
160 Dirtied,
161 Saved,
162 FileHandleChanged,
163 Reloaded,
164 Reparsed,
165 DiagnosticsUpdated,
166 Closed,
167}
168
169pub trait File {
170 fn as_local(&self) -> Option<&dyn LocalFile>;
171
172 fn is_local(&self) -> bool {
173 self.as_local().is_some()
174 }
175
176 fn mtime(&self) -> SystemTime;
177
178 /// Returns the path of this file relative to the worktree's root directory.
179 fn path(&self) -> &Arc<Path>;
180
181 /// Returns the path of this file relative to the worktree's parent directory (this means it
182 /// includes the name of the worktree's root folder).
183 fn full_path(&self, cx: &AppContext) -> PathBuf;
184
    /// Returns the last component of this file's absolute path. If this file is the
    /// root of its worktree, then this method returns the name of the worktree itself.
187 fn file_name(&self, cx: &AppContext) -> OsString;
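    // Illustrative example for the three path accessors above (hypothetical
    // paths, assuming a worktree rooted at "/projects/zed" that contains
    // "src/lib.rs"):
    //
    //   path()      => "src/lib.rs"
    //   full_path() => "zed/src/lib.rs"
    //   file_name() => "lib.rs"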
188
189 fn is_deleted(&self) -> bool;
190
191 fn save(
192 &self,
193 buffer_id: u64,
194 text: Rope,
195 version: clock::Global,
196 cx: &mut MutableAppContext,
197 ) -> Task<Result<(clock::Global, SystemTime)>>;
198
199 fn format_remote(&self, buffer_id: u64, cx: &mut MutableAppContext)
200 -> Option<Task<Result<()>>>;
201
202 fn completions(
203 &self,
204 buffer_id: u64,
205 position: Anchor,
206 cx: &mut MutableAppContext,
207 ) -> Task<Result<Vec<Completion<Anchor>>>>;
208
209 fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);
210
211 fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);
212
213 fn as_any(&self) -> &dyn Any;
214
215 fn to_proto(&self) -> rpc::proto::File;
216}
217
218pub trait LocalFile: File {
219 /// Returns the absolute path of this file.
220 fn abs_path(&self, cx: &AppContext) -> PathBuf;
221
222 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
223
224 fn buffer_reloaded(
225 &self,
226 buffer_id: u64,
227 version: &clock::Global,
228 mtime: SystemTime,
229 cx: &mut MutableAppContext,
230 );
231}
232
233#[cfg(feature = "test-support")]
234pub struct FakeFile {
235 pub path: Arc<Path>,
236}
237
238#[cfg(feature = "test-support")]
239impl File for FakeFile {
240 fn as_local(&self) -> Option<&dyn LocalFile> {
241 Some(self)
242 }
243
244 fn mtime(&self) -> SystemTime {
245 SystemTime::UNIX_EPOCH
246 }
247
248 fn path(&self) -> &Arc<Path> {
249 &self.path
250 }
251
252 fn full_path(&self, _: &AppContext) -> PathBuf {
253 self.path.to_path_buf()
254 }
255
256 fn file_name(&self, _: &AppContext) -> OsString {
257 self.path.file_name().unwrap().to_os_string()
258 }
259
260 fn is_deleted(&self) -> bool {
261 false
262 }
263
264 fn save(
265 &self,
266 _: u64,
267 _: Rope,
268 _: clock::Global,
269 cx: &mut MutableAppContext,
270 ) -> Task<Result<(clock::Global, SystemTime)>> {
271 cx.spawn(|_| async move { Ok((Default::default(), SystemTime::UNIX_EPOCH)) })
272 }
273
274 fn format_remote(&self, _: u64, _: &mut MutableAppContext) -> Option<Task<Result<()>>> {
275 None
276 }
277
278 fn completions(
279 &self,
280 _: u64,
281 _: Anchor,
282 _: &mut MutableAppContext,
283 ) -> Task<Result<Vec<Completion<Anchor>>>> {
284 Task::ready(Ok(Default::default()))
285 }
286
287 fn buffer_updated(&self, _: u64, _: Operation, _: &mut MutableAppContext) {}
288
289 fn buffer_removed(&self, _: u64, _: &mut MutableAppContext) {}
290
291 fn as_any(&self) -> &dyn Any {
292 self
293 }
294
295 fn to_proto(&self) -> rpc::proto::File {
296 unimplemented!()
297 }
298}
299
300#[cfg(feature = "test-support")]
301impl LocalFile for FakeFile {
302 fn abs_path(&self, _: &AppContext) -> PathBuf {
303 self.path.to_path_buf()
304 }
305
306 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
307 cx.background().spawn(async move { Ok(Default::default()) })
308 }
309
310 fn buffer_reloaded(&self, _: u64, _: &clock::Global, _: SystemTime, _: &mut MutableAppContext) {
311 }
312}
313
314pub(crate) struct QueryCursorHandle(Option<QueryCursor>);
315
316#[derive(Clone)]
317struct SyntaxTree {
318 tree: Tree,
319 version: clock::Global,
320}
321
322#[derive(Clone)]
323struct AutoindentRequest {
324 before_edit: BufferSnapshot,
325 edited: Vec<Anchor>,
326 inserted: Option<Vec<Range<Anchor>>>,
327}
328
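/// A suggestion for how to indent a single row: match the indentation of
/// `basis_row`, optionally indented by one additional level.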
329#[derive(Debug)]
330struct IndentSuggestion {
331 basis_row: u32,
332 indent: bool,
333}
334
335struct TextProvider<'a>(&'a Rope);
336
337struct BufferChunkHighlights<'a> {
338 captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
339 next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
340 stack: Vec<(usize, HighlightId)>,
341 highlight_map: HighlightMap,
342 theme: &'a SyntaxTheme,
343 _query_cursor: QueryCursorHandle,
344}
345
346pub struct BufferChunks<'a> {
347 range: Range<usize>,
348 chunks: rope::Chunks<'a>,
349 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
350 error_depth: usize,
351 warning_depth: usize,
352 information_depth: usize,
353 hint_depth: usize,
354 highlights: Option<BufferChunkHighlights<'a>>,
355}
356
357#[derive(Clone, Copy, Debug, Default)]
358pub struct Chunk<'a> {
359 pub text: &'a str,
360 pub highlight_style: Option<HighlightStyle>,
361 pub diagnostic: Option<DiagnosticSeverity>,
362}
363
364pub(crate) struct Diff {
365 base_version: clock::Global,
366 new_text: Arc<str>,
367 changes: Vec<(ChangeTag, usize)>,
368}
369
370#[derive(Clone, Copy)]
371struct DiagnosticEndpoint {
372 offset: usize,
373 is_start: bool,
374 severity: DiagnosticSeverity,
375}
376
377impl Buffer {
378 pub fn new<T: Into<Arc<str>>>(
379 replica_id: ReplicaId,
380 base_text: T,
381 cx: &mut ModelContext<Self>,
382 ) -> Self {
383 Self::build(
384 TextBuffer::new(
385 replica_id,
386 cx.model_id() as u64,
387 History::new(base_text.into()),
388 ),
389 None,
390 )
391 }
392
393 pub fn from_file<T: Into<Arc<str>>>(
394 replica_id: ReplicaId,
395 base_text: T,
396 file: Box<dyn File>,
397 cx: &mut ModelContext<Self>,
398 ) -> Self {
399 Self::build(
400 TextBuffer::new(
401 replica_id,
402 cx.model_id() as u64,
403 History::new(base_text.into()),
404 ),
405 Some(file),
406 )
407 }
408
409 pub fn from_proto(
410 replica_id: ReplicaId,
411 message: proto::BufferState,
412 file: Option<Box<dyn File>>,
413 cx: &mut ModelContext<Self>,
414 ) -> Result<Self> {
415 let fragments_len = message.fragments.len();
416 let buffer = TextBuffer::from_parts(
417 replica_id,
418 message.id,
419 &message.visible_text,
420 &message.deleted_text,
421 message
422 .undo_map
423 .into_iter()
424 .map(proto::deserialize_undo_map_entry),
425 message
426 .fragments
427 .into_iter()
428 .enumerate()
429 .map(|(i, fragment)| {
430 proto::deserialize_buffer_fragment(fragment, i, fragments_len)
431 }),
432 message.lamport_timestamp,
433 From::from(message.version),
434 );
435 let mut this = Self::build(buffer, file);
436 for selection_set in message.selections {
437 this.remote_selections.insert(
438 selection_set.replica_id as ReplicaId,
439 SelectionSet {
440 selections: proto::deserialize_selections(selection_set.selections),
441 lamport_timestamp: clock::Lamport {
442 replica_id: selection_set.replica_id as ReplicaId,
443 value: selection_set.lamport_timestamp,
444 },
445 },
446 );
447 }
448 let snapshot = this.snapshot();
449 let entries = proto::deserialize_diagnostics(message.diagnostics);
450 this.apply_diagnostic_update(
451 DiagnosticSet::from_sorted_entries(entries.into_iter().cloned(), &snapshot),
452 cx,
453 );
454
455 this.completion_triggers = message.completion_triggers;
456
457 let deferred_ops = message
458 .deferred_operations
459 .into_iter()
460 .map(proto::deserialize_operation)
461 .collect::<Result<Vec<_>>>()?;
462 this.apply_ops(deferred_ops, cx)?;
463
464 Ok(this)
465 }
466
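    /// Serializes the buffer's full state (text, edit history, remote
    /// selections, diagnostics, deferred operations, and completion triggers)
    /// so that a replica can be reconstructed with [`Buffer::from_proto`].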
467 pub fn to_proto(&self) -> proto::BufferState {
468 proto::BufferState {
469 id: self.remote_id(),
470 file: self.file.as_ref().map(|f| f.to_proto()),
471 visible_text: self.text.text(),
472 deleted_text: self.text.deleted_text(),
473 undo_map: self
474 .text
475 .undo_history()
476 .map(proto::serialize_undo_map_entry)
477 .collect(),
478 version: From::from(&self.version),
479 lamport_timestamp: self.lamport_clock.value,
480 fragments: self
481 .text
482 .fragments()
483 .map(proto::serialize_buffer_fragment)
484 .collect(),
485 selections: self
486 .remote_selections
487 .iter()
488 .map(|(replica_id, set)| proto::SelectionSet {
489 replica_id: *replica_id as u32,
490 selections: proto::serialize_selections(&set.selections),
491 lamport_timestamp: set.lamport_timestamp.value,
492 })
493 .collect(),
494 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
495 deferred_operations: self
496 .deferred_ops
497 .iter()
498 .map(proto::serialize_operation)
499 .chain(
500 self.text
501 .deferred_ops()
502 .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone()))),
503 )
504 .collect(),
505 completion_triggers: self.completion_triggers.clone(),
506 }
507 }
508
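    /// Builder-style helper that assigns a language to the buffer and kicks
    /// off a reparse. A minimal construction sketch (assuming a
    /// `cx: &mut ModelContext<Buffer>` and an `Arc<Language>` named `rust`
    /// are in scope):
    ///
    /// ```ignore
    /// let buffer = Buffer::new(0, "fn main() {}", cx).with_language(rust, cx);
    /// ```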
509 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
510 self.set_language(Some(language), cx);
511 self
512 }
513
514 pub fn with_language_server(
515 mut self,
516 server: Arc<LanguageServer>,
517 cx: &mut ModelContext<Self>,
518 ) -> Self {
519 self.set_language_server(Some(server), cx);
520 self
521 }
522
523 fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
        let saved_mtime = if let Some(file) = file.as_ref() {
            file.mtime()
        } else {
            UNIX_EPOCH
        };
530
531 Self {
532 saved_mtime,
533 saved_version: buffer.version(),
534 text: buffer,
535 file,
536 syntax_tree: Mutex::new(None),
537 parsing_in_background: false,
538 parse_count: 0,
539 sync_parse_timeout: Duration::from_millis(1),
540 autoindent_requests: Default::default(),
541 pending_autoindent: Default::default(),
542 language: None,
543 remote_selections: Default::default(),
544 selections_update_count: 0,
545 diagnostics: Default::default(),
546 diagnostics_update_count: 0,
547 language_server: None,
548 completion_triggers: Default::default(),
549 deferred_ops: OperationQueue::new(),
550 #[cfg(test)]
551 operations: Default::default(),
552 }
553 }
554
555 pub fn snapshot(&self) -> BufferSnapshot {
556 BufferSnapshot {
557 text: self.text.snapshot(),
558 tree: self.syntax_tree(),
559 remote_selections: self.remote_selections.clone(),
560 diagnostics: self.diagnostics.clone(),
561 diagnostics_update_count: self.diagnostics_update_count,
562 is_parsing: self.parsing_in_background,
563 language: self.language.clone(),
564 parse_count: self.parse_count,
565 selections_update_count: self.selections_update_count,
566 }
567 }
568
569 pub fn file(&self) -> Option<&dyn File> {
570 self.file.as_deref()
571 }
572
573 pub fn format(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
574 let file = if let Some(file) = self.file.as_ref() {
575 file
576 } else {
577 return Task::ready(Err(anyhow!("buffer has no file")));
578 };
579
580 if let Some(LanguageServerState { server, .. }) = self.language_server.as_ref() {
581 let server = server.clone();
582 let abs_path = file.as_local().unwrap().abs_path(cx);
583 let version = self.version();
584 cx.spawn(|this, mut cx| async move {
585 let edits = server
586 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
587 text_document: lsp::TextDocumentIdentifier::new(
588 lsp::Url::from_file_path(&abs_path).unwrap(),
589 ),
590 options: Default::default(),
591 work_done_progress_params: Default::default(),
592 })
593 .await?;
594
595 if let Some(edits) = edits {
596 this.update(&mut cx, |this, cx| {
597 if this.version == version {
598 this.apply_lsp_edits(edits, cx)
599 } else {
600 Err(anyhow!("buffer edited since starting to format"))
601 }
602 })
603 } else {
604 Ok(())
605 }
606 })
607 } else {
608 let format = file.format_remote(self.remote_id(), cx.as_mut());
609 cx.spawn(|_, _| async move {
610 if let Some(format) = format {
611 format.await?;
612 }
613 Ok(())
614 })
615 }
616 }
617
618 pub fn save(
619 &mut self,
620 cx: &mut ModelContext<Self>,
621 ) -> Task<Result<(clock::Global, SystemTime)>> {
622 let file = if let Some(file) = self.file.as_ref() {
623 file
624 } else {
625 return Task::ready(Err(anyhow!("buffer has no file")));
626 };
627 let text = self.as_rope().clone();
628 let version = self.version();
629 let save = file.save(self.remote_id(), text, version, cx.as_mut());
630 cx.spawn(|this, mut cx| async move {
631 let (version, mtime) = save.await?;
632 this.update(&mut cx, |this, cx| {
633 this.did_save(version.clone(), mtime, None, cx);
634 });
635 Ok((version, mtime))
636 })
637 }
638
639 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
640 self.language = language;
641 self.reparse(cx);
642 }
643
644 pub fn set_language_server(
645 &mut self,
646 language_server: Option<Arc<lsp::LanguageServer>>,
647 cx: &mut ModelContext<Self>,
648 ) {
649 self.language_server = if let Some(server) = language_server {
650 let (latest_snapshot_tx, mut latest_snapshot_rx) =
651 watch::channel::<Option<LanguageServerSnapshot>>();
652
653 let maintain_changes = cx.background().spawn({
654 let server = server.clone();
655 async move {
656 let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
657 while let Some(snapshot) = latest_snapshot_rx.recv().await {
658 if let Some(snapshot) = snapshot {
659 let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
660 if let Some(prev_snapshot) = prev_snapshot {
661 let changes = lsp::DidChangeTextDocumentParams {
662 text_document: lsp::VersionedTextDocumentIdentifier::new(
663 uri,
664 snapshot.version as i32,
665 ),
666 content_changes: snapshot
667 .buffer_snapshot
668 .edits_since::<(PointUtf16, usize)>(
669 prev_snapshot.buffer_snapshot.version(),
670 )
671 .map(|edit| {
672 let edit_start = edit.new.start.0;
673 let edit_end =
674 edit_start + (edit.old.end.0 - edit.old.start.0);
675 let new_text = snapshot
676 .buffer_snapshot
677 .text_for_range(edit.new.start.1..edit.new.end.1)
678 .collect();
679 lsp::TextDocumentContentChangeEvent {
680 range: Some(lsp::Range::new(
681 edit_start.to_lsp_position(),
682 edit_end.to_lsp_position(),
683 )),
684 range_length: None,
685 text: new_text,
686 }
687 })
688 .collect(),
689 };
690 server
691 .notify::<lsp::notification::DidChangeTextDocument>(changes)
692 .await?;
693 } else {
694 server
695 .notify::<lsp::notification::DidOpenTextDocument>(
696 lsp::DidOpenTextDocumentParams {
697 text_document: lsp::TextDocumentItem::new(
698 uri,
699 Default::default(),
700 snapshot.version as i32,
701 snapshot.buffer_snapshot.text().to_string(),
702 ),
703 },
704 )
705 .await?;
706 }
707
708 prev_snapshot = Some(snapshot);
709 }
710 }
711 Ok(())
712 }
713 });
714
715 Some(LanguageServerState {
716 latest_snapshot: latest_snapshot_tx,
717 pending_snapshots: Default::default(),
718 next_version: 0,
719 server: server.clone(),
720 _maintain_server: cx.spawn_weak(|this, mut cx| async move {
721 let mut capabilities = server.capabilities();
722 loop {
723 if let Some(capabilities) = capabilities.recv().await.flatten() {
724 if let Some(this) = this.upgrade(&cx) {
725 let triggers = capabilities
726 .completion_provider
727 .and_then(|c| c.trigger_characters)
728 .unwrap_or_default();
729 this.update(&mut cx, |this, cx| {
730 this.completion_triggers = triggers.clone();
731 this.send_operation(
732 Operation::UpdateCompletionTriggers { triggers },
733 cx,
734 );
735 });
736 } else {
737 return;
738 }
739
740 break;
741 }
742 }
743
744 maintain_changes.log_err().await;
745 }),
746 })
747 } else {
748 None
749 };
750
751 self.update_language_server(cx);
752 }
753
754 pub fn did_save(
755 &mut self,
756 version: clock::Global,
757 mtime: SystemTime,
758 new_file: Option<Box<dyn File>>,
759 cx: &mut ModelContext<Self>,
760 ) {
761 self.saved_mtime = mtime;
762 self.saved_version = version;
763 if let Some(new_file) = new_file {
764 self.file = Some(new_file);
765 }
766 if let Some((state, local_file)) = &self
767 .language_server
768 .as_ref()
769 .zip(self.file.as_ref().and_then(|f| f.as_local()))
770 {
771 cx.background()
772 .spawn(
773 state
774 .server
775 .notify::<lsp::notification::DidSaveTextDocument>(
776 lsp::DidSaveTextDocumentParams {
777 text_document: lsp::TextDocumentIdentifier {
778 uri: lsp::Url::from_file_path(local_file.abs_path(cx)).unwrap(),
779 },
780 text: None,
781 },
782 ),
783 )
784 .detach()
785 }
786 cx.emit(Event::Saved);
787 }
788
789 pub fn did_reload(
790 &mut self,
791 version: clock::Global,
792 mtime: SystemTime,
793 cx: &mut ModelContext<Self>,
794 ) {
795 self.saved_mtime = mtime;
796 self.saved_version = version;
797 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
798 file.buffer_reloaded(self.remote_id(), &self.saved_version, self.saved_mtime, cx);
799 }
800 cx.emit(Event::Reloaded);
801 cx.notify();
802 }
803
804 pub fn file_updated(
805 &mut self,
806 new_file: Box<dyn File>,
807 cx: &mut ModelContext<Self>,
808 ) -> Task<()> {
809 let old_file = if let Some(file) = self.file.as_ref() {
810 file
811 } else {
812 return Task::ready(());
813 };
814 let mut file_changed = false;
815 let mut task = Task::ready(());
816
817 if new_file.path() != old_file.path() {
818 file_changed = true;
819 }
820
821 if new_file.is_deleted() {
822 if !old_file.is_deleted() {
823 file_changed = true;
824 if !self.is_dirty() {
825 cx.emit(Event::Dirtied);
826 }
827 }
828 } else {
829 let new_mtime = new_file.mtime();
830 if new_mtime != old_file.mtime() {
831 file_changed = true;
832
833 if !self.is_dirty() {
834 task = cx.spawn(|this, mut cx| {
835 async move {
836 let new_text = this.read_with(&cx, |this, cx| {
837 this.file
838 .as_ref()
839 .and_then(|file| file.as_local().map(|f| f.load(cx)))
840 });
841 if let Some(new_text) = new_text {
842 let new_text = new_text.await?;
843 let diff = this
844 .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
845 .await;
846 this.update(&mut cx, |this, cx| {
847 if this.apply_diff(diff, cx) {
848 this.did_reload(this.version(), new_mtime, cx);
849 }
850 });
851 }
852 Ok(())
853 }
854 .log_err()
855 .map(drop)
856 });
857 }
858 }
859 }
860
861 if file_changed {
862 cx.emit(Event::FileHandleChanged);
863 }
864 self.file = Some(new_file);
865 task
866 }
867
868 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
869 cx.emit(Event::Closed);
870 }
871
872 pub fn language(&self) -> Option<&Arc<Language>> {
873 self.language.as_ref()
874 }
875
876 pub fn language_server(&self) -> Option<&Arc<LanguageServer>> {
877 self.language_server.as_ref().map(|state| &state.server)
878 }
879
880 pub fn parse_count(&self) -> usize {
881 self.parse_count
882 }
883
884 pub fn selections_update_count(&self) -> usize {
885 self.selections_update_count
886 }
887
888 pub fn diagnostics_update_count(&self) -> usize {
889 self.diagnostics_update_count
890 }
891
892 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
893 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
894 self.interpolate_tree(syntax_tree);
895 Some(syntax_tree.tree.clone())
896 } else {
897 None
898 }
899 }
900
901 #[cfg(any(test, feature = "test-support"))]
902 pub fn is_parsing(&self) -> bool {
903 self.parsing_in_background
904 }
905
906 #[cfg(test)]
907 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
908 self.sync_parse_timeout = timeout;
909 }
910
911 fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
912 if self.parsing_in_background {
913 return false;
914 }
915
916 if let Some(grammar) = self.grammar().cloned() {
917 let old_tree = self.syntax_tree();
918 let text = self.as_rope().clone();
919 let parsed_version = self.version();
920 let parse_task = cx.background().spawn({
921 let grammar = grammar.clone();
922 async move { Self::parse_text(&text, old_tree, &grammar) }
923 });
924
925 match cx
926 .background()
927 .block_with_timeout(self.sync_parse_timeout, parse_task)
928 {
929 Ok(new_tree) => {
930 self.did_finish_parsing(new_tree, parsed_version, cx);
931 return true;
932 }
933 Err(parse_task) => {
934 self.parsing_in_background = true;
935 cx.spawn(move |this, mut cx| async move {
936 let new_tree = parse_task.await;
937 this.update(&mut cx, move |this, cx| {
938 let grammar_changed = this
939 .grammar()
940 .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
941 let parse_again =
942 this.version.changed_since(&parsed_version) || grammar_changed;
943 this.parsing_in_background = false;
944 this.did_finish_parsing(new_tree, parsed_version, cx);
945
946 if parse_again && this.reparse(cx) {
947 return;
948 }
949 });
950 })
951 .detach();
952 }
953 }
954 }
955 false
956 }
957
958 fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
959 PARSER.with(|parser| {
960 let mut parser = parser.borrow_mut();
961 parser
962 .set_language(grammar.ts_language)
963 .expect("incompatible grammar");
964 let mut chunks = text.chunks_in_range(0..text.len());
965 let tree = parser
966 .parse_with(
967 &mut move |offset, _| {
968 chunks.seek(offset);
969 chunks.next().unwrap_or("").as_bytes()
970 },
971 old_tree.as_ref(),
972 )
973 .unwrap();
974 tree
975 })
976 }
977
978 fn interpolate_tree(&self, tree: &mut SyntaxTree) {
979 for edit in self.edits_since::<(usize, Point)>(&tree.version) {
980 let (bytes, lines) = edit.flatten();
981 tree.tree.edit(&InputEdit {
982 start_byte: bytes.new.start,
983 old_end_byte: bytes.new.start + bytes.old.len(),
984 new_end_byte: bytes.new.end,
985 start_position: lines.new.start.to_ts_point(),
986 old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
987 .to_ts_point(),
988 new_end_position: lines.new.end.to_ts_point(),
989 });
990 }
991 tree.version = self.version();
992 }
993
994 fn did_finish_parsing(
995 &mut self,
996 tree: Tree,
997 version: clock::Global,
998 cx: &mut ModelContext<Self>,
999 ) {
1000 self.parse_count += 1;
1001 *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
1002 self.request_autoindent(cx);
1003 cx.emit(Event::Reparsed);
1004 cx.notify();
1005 }
1006
1007 pub fn update_diagnostics<T>(
1008 &mut self,
1009 version: Option<i32>,
1010 mut diagnostics: Vec<DiagnosticEntry<T>>,
1011 cx: &mut ModelContext<Self>,
1012 ) -> Result<()>
1013 where
1014 T: Copy + Ord + TextDimension + Sub<Output = T> + Clip + ToPoint,
1015 {
1016 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1017 Ordering::Equal
1018 .then_with(|| b.is_primary.cmp(&a.is_primary))
1019 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1020 .then_with(|| a.severity.cmp(&b.severity))
1021 .then_with(|| a.message.cmp(&b.message))
1022 }
1023
1024 let version = version.map(|version| version as usize);
1025 let content =
1026 if let Some((version, language_server)) = version.zip(self.language_server.as_mut()) {
1027 language_server
1028 .pending_snapshots
1029 .retain(|&v, _| v >= version);
1030 let snapshot = language_server
1031 .pending_snapshots
1032 .get(&version)
1033 .ok_or_else(|| anyhow!("missing snapshot"))?;
1034 &snapshot.buffer_snapshot
1035 } else {
1036 self.deref()
1037 };
1038
1039 diagnostics.sort_unstable_by(|a, b| {
1040 Ordering::Equal
1041 .then_with(|| a.range.start.cmp(&b.range.start))
1042 .then_with(|| b.range.end.cmp(&a.range.end))
1043 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1044 });
1045
1046 let mut sanitized_diagnostics = Vec::new();
1047 let mut edits_since_save = content.edits_since::<T>(&self.saved_version).peekable();
1048 let mut last_edit_old_end = T::default();
1049 let mut last_edit_new_end = T::default();
1050 'outer: for entry in diagnostics {
1051 let mut start = entry.range.start;
1052 let mut end = entry.range.end;
1053
            // Some diagnostics are based on the file's contents on disk rather than
            // the buffer's current contents. Adjust these diagnostics' ranges to
            // reflect any unsaved edits.
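            // For example (hypothetical offsets): if three characters were inserted
            // at offset 4 since the last save, a disk-based diagnostic that started
            // at offset 10 on disk should now start at offset 13 in the buffer.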
1057 if entry.diagnostic.is_disk_based {
1058 while let Some(edit) = edits_since_save.peek() {
1059 if edit.old.end <= start {
1060 last_edit_old_end = edit.old.end;
1061 last_edit_new_end = edit.new.end;
1062 edits_since_save.next();
1063 } else if edit.old.start <= end && edit.old.end >= start {
1064 continue 'outer;
1065 } else {
1066 break;
1067 }
1068 }
1069
1070 let start_overshoot = start - last_edit_old_end;
1071 start = last_edit_new_end;
1072 start.add_assign(&start_overshoot);
1073
1074 let end_overshoot = end - last_edit_old_end;
1075 end = last_edit_new_end;
1076 end.add_assign(&end_overshoot);
1077 }
1078
1079 let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content);
1080 let mut range = range.start.to_point(content)..range.end.to_point(content);
1081 // Expand empty ranges by one character
1082 if range.start == range.end {
1083 range.end.column += 1;
1084 range.end = content.clip_point(range.end, Bias::Right);
1085 if range.start == range.end && range.end.column > 0 {
1086 range.start.column -= 1;
1087 range.start = content.clip_point(range.start, Bias::Left);
1088 }
1089 }
1090
1091 sanitized_diagnostics.push(DiagnosticEntry {
1092 range,
1093 diagnostic: entry.diagnostic,
1094 });
1095 }
1096 drop(edits_since_save);
1097
1098 let set = DiagnosticSet::new(sanitized_diagnostics, content);
1099 self.apply_diagnostic_update(set.clone(), cx);
1100
1101 let op = Operation::UpdateDiagnostics {
1102 diagnostics: set.iter().cloned().collect(),
1103 lamport_timestamp: self.text.lamport_clock.tick(),
1104 };
1105 self.send_operation(op, cx);
1106 Ok(())
1107 }
1108
1109 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1110 if let Some(indent_columns) = self.compute_autoindents() {
1111 let indent_columns = cx.background().spawn(indent_columns);
1112 match cx
1113 .background()
1114 .block_with_timeout(Duration::from_micros(500), indent_columns)
1115 {
1116 Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
1117 Err(indent_columns) => {
1118 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1119 let indent_columns = indent_columns.await;
1120 this.update(&mut cx, |this, cx| {
1121 this.apply_autoindents(indent_columns, cx);
1122 });
1123 }));
1124 }
1125 }
1126 }
1127 }
1128
1129 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
1130 let max_rows_between_yields = 100;
1131 let snapshot = self.snapshot();
1132 if snapshot.language.is_none()
1133 || snapshot.tree.is_none()
1134 || self.autoindent_requests.is_empty()
1135 {
1136 return None;
1137 }
1138
1139 let autoindent_requests = self.autoindent_requests.clone();
1140 Some(async move {
1141 let mut indent_columns = BTreeMap::new();
1142 for request in autoindent_requests {
1143 let old_to_new_rows = request
1144 .edited
1145 .iter()
1146 .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
1147 .zip(
1148 request
1149 .edited
1150 .iter()
1151 .map(|anchor| anchor.summary::<Point>(&snapshot).row),
1152 )
1153 .collect::<BTreeMap<u32, u32>>();
1154
1155 let mut old_suggestions = HashMap::<u32, u32>::default();
1156 let old_edited_ranges =
1157 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1158 for old_edited_range in old_edited_ranges {
1159 let suggestions = request
1160 .before_edit
1161 .suggest_autoindents(old_edited_range.clone())
1162 .into_iter()
1163 .flatten();
1164 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1165 let indentation_basis = old_to_new_rows
1166 .get(&suggestion.basis_row)
1167 .and_then(|from_row| old_suggestions.get(from_row).copied())
1168 .unwrap_or_else(|| {
1169 request
1170 .before_edit
1171 .indent_column_for_line(suggestion.basis_row)
1172 });
1173 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1174 old_suggestions.insert(
1175 *old_to_new_rows.get(&old_row).unwrap(),
1176 indentation_basis + delta,
1177 );
1178 }
1179 yield_now().await;
1180 }
1181
                // At this point, `old_suggestions` contains the suggested indentation
                // for every edited line, computed against the buffer as it was before
                // the edit, but keyed by each line's row after the edits were applied.
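                // For example (hypothetical rows): if the edit moved row 3 down to
                // row 5, then the suggestion computed for the old row 3 is stored
                // under key 5 in `old_suggestions`.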
1184 let new_edited_row_ranges =
1185 contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
1186 for new_edited_row_range in new_edited_row_ranges {
1187 let suggestions = snapshot
1188 .suggest_autoindents(new_edited_row_range.clone())
1189 .into_iter()
1190 .flatten();
1191 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1192 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1193 let new_indentation = indent_columns
1194 .get(&suggestion.basis_row)
1195 .copied()
1196 .unwrap_or_else(|| {
1197 snapshot.indent_column_for_line(suggestion.basis_row)
1198 })
1199 + delta;
1200 if old_suggestions
1201 .get(&new_row)
1202 .map_or(true, |old_indentation| new_indentation != *old_indentation)
1203 {
1204 indent_columns.insert(new_row, new_indentation);
1205 }
1206 }
1207 yield_now().await;
1208 }
1209
1210 if let Some(inserted) = request.inserted.as_ref() {
1211 let inserted_row_ranges = contiguous_ranges(
1212 inserted
1213 .iter()
1214 .map(|range| range.to_point(&snapshot))
1215 .flat_map(|range| range.start.row..range.end.row + 1),
1216 max_rows_between_yields,
1217 );
1218 for inserted_row_range in inserted_row_ranges {
1219 let suggestions = snapshot
1220 .suggest_autoindents(inserted_row_range.clone())
1221 .into_iter()
1222 .flatten();
1223 for (row, suggestion) in inserted_row_range.zip(suggestions) {
1224 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1225 let new_indentation = indent_columns
1226 .get(&suggestion.basis_row)
1227 .copied()
1228 .unwrap_or_else(|| {
1229 snapshot.indent_column_for_line(suggestion.basis_row)
1230 })
1231 + delta;
1232 indent_columns.insert(row, new_indentation);
1233 }
1234 yield_now().await;
1235 }
1236 }
1237 }
1238 indent_columns
1239 })
1240 }
1241
1242 fn apply_autoindents(
1243 &mut self,
1244 indent_columns: BTreeMap<u32, u32>,
1245 cx: &mut ModelContext<Self>,
1246 ) {
1247 self.autoindent_requests.clear();
1248 self.start_transaction();
1249 for (row, indent_column) in &indent_columns {
1250 self.set_indent_column_for_line(*row, *indent_column, cx);
1251 }
1252 self.end_transaction(cx);
1253 }
1254
1255 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
1256 let current_column = self.indent_column_for_line(row);
1257 if column > current_column {
1258 let offset = Point::new(row, 0).to_offset(&*self);
1259 self.edit(
1260 [offset..offset],
1261 " ".repeat((column - current_column) as usize),
1262 cx,
1263 );
1264 } else if column < current_column {
1265 self.edit(
1266 [Point::new(row, 0)..Point::new(row, current_column - column)],
1267 "",
1268 cx,
1269 );
1270 }
1271 }
1272
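    /// Computes a line-based diff between the buffer's current text and
    /// `new_text` on a background thread, recording the buffer version it was
    /// computed against so that `apply_diff` can detect intervening edits.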
1273 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
1274 // TODO: it would be nice to not allocate here.
1275 let old_text = self.text();
1276 let base_version = self.version();
1277 cx.background().spawn(async move {
1278 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
1279 .iter_all_changes()
1280 .map(|c| (c.tag(), c.value().len()))
1281 .collect::<Vec<_>>();
1282 Diff {
1283 base_version,
1284 new_text,
1285 changes,
1286 }
1287 })
1288 }
1289
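    /// Applies a diff produced by [`Buffer::diff`], but only if the buffer has
    /// not been edited since `diff.base_version` was captured. Returns whether
    /// the diff was applied.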
1290 pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
1291 if self.version == diff.base_version {
1292 self.start_transaction();
1293 let mut offset = 0;
1294 for (tag, len) in diff.changes {
1295 let range = offset..(offset + len);
1296 match tag {
1297 ChangeTag::Equal => offset += len,
1298 ChangeTag::Delete => self.edit(Some(range), "", cx),
1299 ChangeTag::Insert => {
1300 self.edit(Some(offset..offset), &diff.new_text[range], cx);
1301 offset += len;
1302 }
1303 }
1304 }
1305 self.end_transaction(cx);
1306 true
1307 } else {
1308 false
1309 }
1310 }
1311
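    /// Whether the buffer contains edits that have not been saved, or its file
    /// has been deleted from disk.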
1312 pub fn is_dirty(&self) -> bool {
1313 !self.saved_version.observed_all(&self.version)
1314 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1315 }
1316
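    /// Whether the file on disk changed after the buffer's last save while the
    /// buffer also has unsaved edits, i.e. the on-disk mtime is newer than the
    /// mtime recorded at the last save.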
1317 pub fn has_conflict(&self) -> bool {
1318 !self.saved_version.observed_all(&self.version)
1319 && self
1320 .file
1321 .as_ref()
1322 .map_or(false, |file| file.mtime() > self.saved_mtime)
1323 }
1324
1325 pub fn subscribe(&mut self) -> Subscription {
1326 self.text.subscribe()
1327 }
1328
1329 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1330 self.start_transaction_at(Instant::now())
1331 }
1332
1333 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1334 self.text.start_transaction_at(now)
1335 }
1336
1337 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1338 self.end_transaction_at(Instant::now(), cx)
1339 }
1340
1341 pub fn end_transaction_at(
1342 &mut self,
1343 now: Instant,
1344 cx: &mut ModelContext<Self>,
1345 ) -> Option<TransactionId> {
1346 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1347 let was_dirty = start_version != self.saved_version;
1348 self.did_edit(&start_version, was_dirty, cx);
1349 Some(transaction_id)
1350 } else {
1351 None
1352 }
1353 }
1354
1355 pub fn avoid_grouping_next_transaction(&mut self) {
1356 self.text.avoid_grouping_next_transaction();
1357 }
1358
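    /// Records this replica's active selections and broadcasts them to other
    /// replicas as an `UpdateSelections` operation.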
1359 pub fn set_active_selections(
1360 &mut self,
1361 selections: Arc<[Selection<Anchor>]>,
1362 cx: &mut ModelContext<Self>,
1363 ) {
1364 let lamport_timestamp = self.text.lamport_clock.tick();
1365 self.remote_selections.insert(
1366 self.text.replica_id(),
1367 SelectionSet {
1368 selections: selections.clone(),
1369 lamport_timestamp,
1370 },
1371 );
1372 self.send_operation(
1373 Operation::UpdateSelections {
1374 replica_id: self.text.replica_id(),
1375 selections,
1376 lamport_timestamp,
1377 },
1378 cx,
1379 );
1380 }
1381
1382 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1383 self.set_active_selections(Arc::from([]), cx);
1384 }
1385
1386 fn update_language_server(&mut self, cx: &AppContext) {
1387 let language_server = if let Some(language_server) = self.language_server.as_mut() {
1388 language_server
1389 } else {
1390 return;
1391 };
1392 let abs_path = self
1393 .file
1394 .as_ref()
1395 .and_then(|f| f.as_local())
1396 .map_or(Path::new("/").to_path_buf(), |file| file.abs_path(cx));
1397
1398 let version = post_inc(&mut language_server.next_version);
1399 let snapshot = LanguageServerSnapshot {
1400 buffer_snapshot: self.text.snapshot(),
1401 version,
1402 path: Arc::from(abs_path),
1403 };
1404 language_server
1405 .pending_snapshots
1406 .insert(version, snapshot.clone());
1407 let _ = language_server
1408 .latest_snapshot
1409 .blocking_send(Some(snapshot));
1410 }
1411
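    /// Replaces each of the given ranges with `new_text`. All ranges are
    /// resolved against the buffer's contents as they are before this call.
    /// A minimal sketch (assuming a `cx: &mut ModelContext<Buffer>` is in
    /// scope):
    ///
    /// ```ignore
    /// // Insert "abc" at offset 0 and also replace offsets 5..8 with "abc".
    /// buffer.edit([0..0, 5..8], "abc", cx);
    /// ```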
1412 pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
1413 where
1414 I: IntoIterator<Item = Range<S>>,
1415 S: ToOffset,
1416 T: Into<String>,
1417 {
1418 self.edit_internal(ranges_iter, new_text, false, cx)
1419 }
1420
1421 pub fn edit_with_autoindent<I, S, T>(
1422 &mut self,
1423 ranges_iter: I,
1424 new_text: T,
1425 cx: &mut ModelContext<Self>,
1426 ) where
1427 I: IntoIterator<Item = Range<S>>,
1428 S: ToOffset,
1429 T: Into<String>,
1430 {
1431 self.edit_internal(ranges_iter, new_text, true, cx)
1432 }
1433
1434 pub fn edit_internal<I, S, T>(
1435 &mut self,
1436 ranges_iter: I,
1437 new_text: T,
1438 autoindent: bool,
1439 cx: &mut ModelContext<Self>,
1440 ) where
1441 I: IntoIterator<Item = Range<S>>,
1442 S: ToOffset,
1443 T: Into<String>,
1444 {
1445 let new_text = new_text.into();
1446
        // Skip no-op edits (empty ranges with empty replacement text) and coalesce
        // overlapping or contiguous ranges.
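        // For example (hypothetical input): the ranges `[1..3, 2..5, 5..6]`
        // collapse into the single range `1..6`.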
1448 let mut ranges: Vec<Range<usize>> = Vec::new();
1449 for range in ranges_iter {
1450 let range = range.start.to_offset(self)..range.end.to_offset(self);
1451 if !new_text.is_empty() || !range.is_empty() {
1452 if let Some(prev_range) = ranges.last_mut() {
1453 if prev_range.end >= range.start {
1454 prev_range.end = cmp::max(prev_range.end, range.end);
1455 } else {
1456 ranges.push(range);
1457 }
1458 } else {
1459 ranges.push(range);
1460 }
1461 }
1462 }
1463 if ranges.is_empty() {
1464 return;
1465 }
1466
1467 self.start_transaction();
1468 self.pending_autoindent.take();
1469 let autoindent_request = if autoindent && self.language.is_some() {
1470 let before_edit = self.snapshot();
1471 let edited = ranges
1472 .iter()
1473 .filter_map(|range| {
1474 let start = range.start.to_point(self);
1475 if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
1476 None
1477 } else {
1478 Some(self.anchor_before(range.start))
1479 }
1480 })
1481 .collect();
1482 Some((before_edit, edited))
1483 } else {
1484 None
1485 };
1486
1487 let first_newline_ix = new_text.find('\n');
1488 let new_text_len = new_text.len();
1489
1490 let edit = self.text.edit(ranges.iter().cloned(), new_text);
1491
1492 if let Some((before_edit, edited)) = autoindent_request {
1493 let mut inserted = None;
1494 if let Some(first_newline_ix) = first_newline_ix {
1495 let mut delta = 0isize;
1496 inserted = Some(
1497 ranges
1498 .iter()
1499 .map(|range| {
1500 let start =
1501 (delta + range.start as isize) as usize + first_newline_ix + 1;
1502 let end = (delta + range.start as isize) as usize + new_text_len;
1503 delta +=
1504 (range.end as isize - range.start as isize) + new_text_len as isize;
1505 self.anchor_before(start)..self.anchor_after(end)
1506 })
1507 .collect(),
1508 );
1509 }
1510
1511 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1512 before_edit,
1513 edited,
1514 inserted,
1515 }));
1516 }
1517
1518 self.end_transaction(cx);
1519 self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
1520 }
1521
1522 fn apply_lsp_edits(
1523 &mut self,
1524 edits: Vec<lsp::TextEdit>,
1525 cx: &mut ModelContext<Self>,
1526 ) -> Result<()> {
1527 for edit in &edits {
1528 let range = range_from_lsp(edit.range);
1529 if self.clip_point_utf16(range.start, Bias::Left) != range.start
1530 || self.clip_point_utf16(range.end, Bias::Left) != range.end
1531 {
1532 return Err(anyhow!(
1533 "invalid formatting edits received from language server"
1534 ));
1535 }
1536 }
1537
1538 for edit in edits.into_iter().rev() {
1539 self.edit([range_from_lsp(edit.range)], edit.new_text, cx);
1540 }
1541
1542 Ok(())
1543 }
1544
1545 fn did_edit(
1546 &mut self,
1547 old_version: &clock::Global,
1548 was_dirty: bool,
1549 cx: &mut ModelContext<Self>,
1550 ) {
1551 if self.edits_since::<usize>(old_version).next().is_none() {
1552 return;
1553 }
1554
1555 self.reparse(cx);
1556 self.update_language_server(cx);
1557
1558 cx.emit(Event::Edited);
1559 if !was_dirty {
1560 cx.emit(Event::Dirtied);
1561 }
1562 cx.notify();
1563 }
1564
1565 fn grammar(&self) -> Option<&Arc<Grammar>> {
1566 self.language.as_ref().and_then(|l| l.grammar.as_ref())
1567 }
1568
1569 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1570 &mut self,
1571 ops: I,
1572 cx: &mut ModelContext<Self>,
1573 ) -> Result<()> {
1574 self.pending_autoindent.take();
1575 let was_dirty = self.is_dirty();
1576 let old_version = self.version.clone();
1577 let mut deferred_ops = Vec::new();
1578 let buffer_ops = ops
1579 .into_iter()
1580 .filter_map(|op| match op {
1581 Operation::Buffer(op) => Some(op),
1582 _ => {
1583 if self.can_apply_op(&op) {
1584 self.apply_op(op, cx);
1585 } else {
1586 deferred_ops.push(op);
1587 }
1588 None
1589 }
1590 })
1591 .collect::<Vec<_>>();
1592 self.text.apply_ops(buffer_ops)?;
1593 self.deferred_ops.insert(deferred_ops);
1594 self.flush_deferred_ops(cx);
1595 self.did_edit(&old_version, was_dirty, cx);
        // Notify even if the buffer wasn't edited, since the operations may have
        // included a selection update.
1598 cx.notify();
1599 Ok(())
1600 }
1601
1602 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1603 let mut deferred_ops = Vec::new();
1604 for op in self.deferred_ops.drain().iter().cloned() {
1605 if self.can_apply_op(&op) {
1606 self.apply_op(op, cx);
1607 } else {
1608 deferred_ops.push(op);
1609 }
1610 }
1611 self.deferred_ops.insert(deferred_ops);
1612 }
1613
1614 fn can_apply_op(&self, operation: &Operation) -> bool {
1615 match operation {
1616 Operation::Buffer(_) => {
1617 unreachable!("buffer operations should never be applied at this layer")
1618 }
1619 Operation::UpdateDiagnostics {
1620 diagnostics: diagnostic_set,
1621 ..
1622 } => diagnostic_set.iter().all(|diagnostic| {
1623 self.text.can_resolve(&diagnostic.range.start)
1624 && self.text.can_resolve(&diagnostic.range.end)
1625 }),
1626 Operation::UpdateSelections { selections, .. } => selections
1627 .iter()
1628 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1629 Operation::UpdateCompletionTriggers { .. } => true,
1630 }
1631 }
1632
1633 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1634 match operation {
1635 Operation::Buffer(_) => {
1636 unreachable!("buffer operations should never be applied at this layer")
1637 }
1638 Operation::UpdateDiagnostics {
1639 diagnostics: diagnostic_set,
1640 ..
1641 } => {
1642 let snapshot = self.snapshot();
1643 self.apply_diagnostic_update(
1644 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1645 cx,
1646 );
1647 }
1648 Operation::UpdateSelections {
1649 replica_id,
1650 selections,
1651 lamport_timestamp,
1652 } => {
1653 if let Some(set) = self.remote_selections.get(&replica_id) {
1654 if set.lamport_timestamp > lamport_timestamp {
1655 return;
1656 }
1657 }
1658
1659 self.remote_selections.insert(
1660 replica_id,
1661 SelectionSet {
1662 selections,
1663 lamport_timestamp,
1664 },
1665 );
1666 self.text.lamport_clock.observe(lamport_timestamp);
1667 self.selections_update_count += 1;
1668 }
1669 Operation::UpdateCompletionTriggers { triggers } => {
1670 self.completion_triggers = triggers;
1671 }
1672 }
1673 }
1674
1675 fn apply_diagnostic_update(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext<Self>) {
1676 self.diagnostics = diagnostics;
1677 self.diagnostics_update_count += 1;
1678 cx.notify();
1679 cx.emit(Event::DiagnosticsUpdated);
1680 }
1681
1682 #[cfg(not(test))]
1683 pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1684 if let Some(file) = &self.file {
1685 file.buffer_updated(self.remote_id(), operation, cx.as_mut());
1686 }
1687 }
1688
1689 #[cfg(test)]
1690 pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
1691 self.operations.push(operation);
1692 }
1693
1694 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1695 self.remote_selections.remove(&replica_id);
1696 cx.notify();
1697 }
1698
1699 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1700 let was_dirty = self.is_dirty();
1701 let old_version = self.version.clone();
1702
1703 if let Some((transaction_id, operation)) = self.text.undo() {
1704 self.send_operation(Operation::Buffer(operation), cx);
1705 self.did_edit(&old_version, was_dirty, cx);
1706 Some(transaction_id)
1707 } else {
1708 None
1709 }
1710 }
1711
1712 pub fn undo_transaction(
1713 &mut self,
1714 transaction_id: TransactionId,
1715 cx: &mut ModelContext<Self>,
1716 ) -> bool {
1717 let was_dirty = self.is_dirty();
1718 let old_version = self.version.clone();
1719
1720 if let Some(operation) = self.text.undo_transaction(transaction_id) {
1721 self.send_operation(Operation::Buffer(operation), cx);
1722 self.did_edit(&old_version, was_dirty, cx);
1723 true
1724 } else {
1725 false
1726 }
1727 }
1728
1729 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1730 let was_dirty = self.is_dirty();
1731 let old_version = self.version.clone();
1732
1733 if let Some((transaction_id, operation)) = self.text.redo() {
1734 self.send_operation(Operation::Buffer(operation), cx);
1735 self.did_edit(&old_version, was_dirty, cx);
1736 Some(transaction_id)
1737 } else {
1738 None
1739 }
1740 }
1741
1742 pub fn redo_transaction(
1743 &mut self,
1744 transaction_id: TransactionId,
1745 cx: &mut ModelContext<Self>,
1746 ) -> bool {
1747 let was_dirty = self.is_dirty();
1748 let old_version = self.version.clone();
1749
1750 if let Some(operation) = self.text.redo_transaction(transaction_id) {
1751 self.send_operation(Operation::Buffer(operation), cx);
1752 self.did_edit(&old_version, was_dirty, cx);
1753 true
1754 } else {
1755 false
1756 }
1757 }
1758
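    /// Requests completions at the given position. For a local file with a
    /// language server attached, the request is sent directly to the server;
    /// for remote files it is forwarded to the host via the buffer's [`File`].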
1759 pub fn completions<T>(
1760 &self,
1761 position: T,
1762 cx: &mut ModelContext<Self>,
1763 ) -> Task<Result<Vec<Completion<Anchor>>>>
1764 where
1765 T: ToOffset,
1766 {
1767 let file = if let Some(file) = self.file.as_ref() {
1768 file
1769 } else {
1770 return Task::ready(Ok(Default::default()));
1771 };
1772
1773 if let Some(file) = file.as_local() {
1774 let server = if let Some(lang) = self.language_server.as_ref() {
1775 lang.server.clone()
1776 } else {
1777 return Task::ready(Ok(Default::default()));
1778 };
1779 let abs_path = file.abs_path(cx);
1780 let position = self.offset_to_point_utf16(position.to_offset(self));
1781
1782 cx.spawn(|this, cx| async move {
1783 let completions = server
1784 .request::<lsp::request::Completion>(lsp::CompletionParams {
1785 text_document_position: lsp::TextDocumentPositionParams::new(
1786 lsp::TextDocumentIdentifier::new(
1787 lsp::Url::from_file_path(abs_path).unwrap(),
1788 ),
1789 position.to_lsp_position(),
1790 ),
1791 context: Default::default(),
1792 work_done_progress_params: Default::default(),
1793 partial_result_params: Default::default(),
1794 })
1795 .await?;
1796
1797 let completions = if let Some(completions) = completions {
1798 match completions {
1799 lsp::CompletionResponse::Array(completions) => completions,
1800 lsp::CompletionResponse::List(list) => list.items,
1801 }
1802 } else {
1803 Default::default()
1804 };
1805
1806 this.read_with(&cx, |this, _| {
1807 Ok(completions.into_iter().filter_map(|lsp_completion| {
1808 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1809 lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()),
1810 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1811 log::info!("received an insert and replace completion but we don't yet support that");
1812 return None
1813 },
1814 };
1815
1816 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                    let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1818 if clipped_start == old_range.start && clipped_end == old_range.end {
1819 Some(Completion {
1820 old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end),
1821 new_text,
1822 lsp_completion,
1823 })
1824 } else {
1825 None
1826 }
1827 }).collect())
1828 })
1829 })
1830 } else {
1831 file.completions(self.remote_id(), self.anchor_before(position), cx.as_mut())
1832 }
1833 }
1834
1835 pub fn apply_additional_edits_for_completion(
1836 &mut self,
1837 completion: Completion<Anchor>,
1838 cx: &mut ModelContext<Self>,
1839 ) -> Option<Task<Result<()>>> {
1840 self.file.as_ref()?.as_local()?;
1841 let server = self.language_server.as_ref()?.server.clone();
1842 Some(cx.spawn(|this, mut cx| async move {
1843 let resolved_completion = server
1844 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1845 .await?;
1846 if let Some(additional_edits) = resolved_completion.additional_text_edits {
1847 this.update(&mut cx, |this, cx| {
1848 this.apply_lsp_edits(additional_edits, cx)
1849 })?;
1850 }
1851 Ok::<_, anyhow::Error>(())
1852 }))
1853 }
1854
1855 pub fn completion_triggers(&self) -> &[String] {
1856 &self.completion_triggers
1857 }
1858}
1859
1860#[cfg(any(test, feature = "test-support"))]
1861impl Buffer {
1862 pub fn set_group_interval(&mut self, group_interval: Duration) {
1863 self.text.set_group_interval(group_interval);
1864 }
1865
1866 pub fn randomly_edit<T>(
1867 &mut self,
1868 rng: &mut T,
1869 old_range_count: usize,
1870 cx: &mut ModelContext<Self>,
1871 ) where
1872 T: rand::Rng,
1873 {
1874 let mut old_ranges: Vec<Range<usize>> = Vec::new();
1875 for _ in 0..old_range_count {
1876 let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1);
1877 if last_end > self.len() {
1878 break;
1879 }
1880 old_ranges.push(self.text.random_byte_range(last_end, rng));
1881 }
1882 let new_text_len = rng.gen_range(0..10);
1883 let new_text: String = crate::random_char_iter::RandomCharIter::new(&mut *rng)
1884 .take(new_text_len)
1885 .collect();
1886 log::info!(
1887 "mutating buffer {} at {:?}: {:?}",
1888 self.replica_id(),
1889 old_ranges,
1890 new_text
1891 );
1892 self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx);
1893 }
1894
1895 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1896 let was_dirty = self.is_dirty();
1897 let old_version = self.version.clone();
1898
1899 let ops = self.text.randomly_undo_redo(rng);
1900 if !ops.is_empty() {
1901 for op in ops {
1902 self.send_operation(Operation::Buffer(op), cx);
1903 self.did_edit(&old_version, was_dirty, cx);
1904 }
1905 }
1906 }
1907}
1908
1909impl Entity for Buffer {
1910 type Event = Event;
1911
1912 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
1913 if let Some(file) = self.file.as_ref() {
1914 file.buffer_removed(self.remote_id(), cx);
1915 }
1916 }
1917}
1918
1919impl Deref for Buffer {
1920 type Target = TextBuffer;
1921
1922 fn deref(&self) -> &Self::Target {
1923 &self.text
1924 }
1925}
1926
1927impl BufferSnapshot {
1928 fn suggest_autoindents<'a>(
1929 &'a self,
1930 row_range: Range<u32>,
1931 ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
1932 let mut query_cursor = QueryCursorHandle::new();
1933 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1934 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1935
1936 // Get the "indentation ranges" that intersect this row range.
1937 let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
1938 let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
1939 query_cursor.set_point_range(
1940 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
1941 ..Point::new(row_range.end, 0).to_ts_point(),
1942 );
1943 let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
1944 for mat in query_cursor.matches(
1945 &grammar.indents_query,
1946 tree.root_node(),
1947 TextProvider(self.as_rope()),
1948 ) {
1949 let mut node_kind = "";
1950 let mut start: Option<Point> = None;
1951 let mut end: Option<Point> = None;
1952 for capture in mat.captures {
1953 if Some(capture.index) == indent_capture_ix {
1954 node_kind = capture.node.kind();
1955 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1956 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1957 } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position()));
1959 }
1960 }
1961
1962 if let Some((start, end)) = start.zip(end) {
1963 if start.row == end.row {
1964 continue;
1965 }
1966
1967 let range = start..end;
1968 match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
1969 Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
1970 Ok(ix) => {
1971 let prev_range = &mut indentation_ranges[ix];
1972 prev_range.0.end = prev_range.0.end.max(range.end);
1973 }
1974 }
1975 }
1976 }
1977
1978 let mut prev_row = prev_non_blank_row.unwrap_or(0);
1979 Some(row_range.map(move |row| {
1980 let row_start = Point::new(row, self.indent_column_for_line(row));
1981
1982 let mut indent_from_prev_row = false;
1983 let mut outdent_to_row = u32::MAX;
1984 for (range, _node_kind) in &indentation_ranges {
1985 if range.start.row >= row {
1986 break;
1987 }
1988
1989 if range.start.row == prev_row && range.end > row_start {
1990 indent_from_prev_row = true;
1991 }
1992 if range.end.row >= prev_row && range.end <= row_start {
1993 outdent_to_row = outdent_to_row.min(range.start.row);
1994 }
1995 }
1996
1997 let suggestion = if outdent_to_row == prev_row {
1998 IndentSuggestion {
1999 basis_row: prev_row,
2000 indent: false,
2001 }
2002 } else if indent_from_prev_row {
2003 IndentSuggestion {
2004 basis_row: prev_row,
2005 indent: true,
2006 }
2007 } else if outdent_to_row < prev_row {
2008 IndentSuggestion {
2009 basis_row: outdent_to_row,
2010 indent: false,
2011 }
2012 } else {
2013 IndentSuggestion {
2014 basis_row: prev_row,
2015 indent: false,
2016 }
2017 };
2018
2019 prev_row = row;
2020 suggestion
2021 }))
2022 } else {
2023 None
2024 }
2025 }
2026
2027 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2028 while row > 0 {
2029 row -= 1;
2030 if !self.is_line_blank(row) {
2031 return Some(row);
2032 }
2033 }
2034 None
2035 }
2036
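    // Iterate over the text in `range` as a sequence of chunks, splitting at syntax
    // highlight boundaries and diagnostic boundaries. Highlighting and diagnostic
    // information are only computed when a `theme` is provided.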
2037 pub fn chunks<'a, T: ToOffset>(
2038 &'a self,
2039 range: Range<T>,
2040 theme: Option<&'a SyntaxTheme>,
2041 ) -> BufferChunks<'a> {
2042 let range = range.start.to_offset(self)..range.end.to_offset(self);
2043
2044 let mut highlights = None;
2045 let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
2046 if let Some(theme) = theme {
2047 for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
2048 diagnostic_endpoints.push(DiagnosticEndpoint {
2049 offset: entry.range.start,
2050 is_start: true,
2051 severity: entry.diagnostic.severity,
2052 });
2053 diagnostic_endpoints.push(DiagnosticEndpoint {
2054 offset: entry.range.end,
2055 is_start: false,
2056 severity: entry.diagnostic.severity,
2057 });
2058 }
2059 diagnostic_endpoints
2060 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2061
2062 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
2063 let mut query_cursor = QueryCursorHandle::new();
2064
2065 // TODO - add a Tree-sitter API to remove the need for this.
2066 let cursor = unsafe {
2067 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
2068 };
2069 let captures = cursor.set_byte_range(range.clone()).captures(
2070 &grammar.highlights_query,
2071 tree.root_node(),
2072 TextProvider(self.text.as_rope()),
2073 );
2074 highlights = Some(BufferChunkHighlights {
2075 captures,
2076 next_capture: None,
2077 stack: Default::default(),
2078 highlight_map: grammar.highlight_map(),
2079 _query_cursor: query_cursor,
2080 theme,
2081 })
2082 }
2083 }
2084
2085 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2086 let chunks = self.text.as_rope().chunks_in_range(range.clone());
2087
2088 BufferChunks {
2089 range,
2090 chunks,
2091 diagnostic_endpoints,
2092 error_depth: 0,
2093 warning_depth: 0,
2094 information_depth: 0,
2095 hint_depth: 0,
2096 highlights,
2097 }
2098 }
2099
2100 pub fn language(&self) -> Option<&Arc<Language>> {
2101 self.language.as_ref()
2102 }
2103
2104 fn grammar(&self) -> Option<&Arc<Grammar>> {
2105 self.language
2106 .as_ref()
2107 .and_then(|language| language.grammar.as_ref())
2108 }
2109
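    // Return the byte range of the smallest syntax node that contains `range` and is
    // strictly larger than it.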
2110 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2111 if let Some(tree) = self.tree.as_ref() {
2112 let root = tree.root_node();
2113 let range = range.start.to_offset(self)..range.end.to_offset(self);
2114 let mut node = root.descendant_for_byte_range(range.start, range.end);
2115 while node.map_or(false, |n| n.byte_range() == range) {
2116 node = node.unwrap().parent();
2117 }
2118 node.map(|n| n.byte_range())
2119 } else {
2120 None
2121 }
2122 }
2123
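    // Build an outline from the grammar's outline query. Each `item` capture becomes
    // an entry whose text is assembled from its `name` and `context` captures, with
    // syntax highlighting applied, and whose depth reflects how the item ranges nest
    // within one another.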
2124 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2125 let tree = self.tree.as_ref()?;
2126 let grammar = self
2127 .language
2128 .as_ref()
2129 .and_then(|language| language.grammar.as_ref())?;
2130
2131 let mut cursor = QueryCursorHandle::new();
2132 let matches = cursor.matches(
2133 &grammar.outline_query,
2134 tree.root_node(),
2135 TextProvider(self.as_rope()),
2136 );
2137
2138 let mut chunks = self.chunks(0..self.len(), theme);
2139
2140 let item_capture_ix = grammar.outline_query.capture_index_for_name("item")?;
2141 let name_capture_ix = grammar.outline_query.capture_index_for_name("name")?;
2142 let context_capture_ix = grammar
2143 .outline_query
2144 .capture_index_for_name("context")
2145 .unwrap_or(u32::MAX);
2146
2147 let mut stack = Vec::<Range<usize>>::new();
2148 let items = matches
2149 .filter_map(|mat| {
2150 let item_node = mat.nodes_for_capture_index(item_capture_ix).next()?;
2151 let range = item_node.start_byte()..item_node.end_byte();
2152 let mut text = String::new();
2153 let mut name_ranges = Vec::new();
2154 let mut highlight_ranges = Vec::new();
2155
2156 for capture in mat.captures {
2157 let node_is_name;
2158 if capture.index == name_capture_ix {
2159 node_is_name = true;
2160 } else if capture.index == context_capture_ix {
2161 node_is_name = false;
2162 } else {
2163 continue;
2164 }
2165
2166 let range = capture.node.start_byte()..capture.node.end_byte();
2167 if !text.is_empty() {
2168 text.push(' ');
2169 }
2170 if node_is_name {
2171 let mut start = text.len();
2172 let end = start + range.len();
2173
                        // When multiple names are captured, the matchable text
                        // includes the whitespace between the names.
2176 if !name_ranges.is_empty() {
2177 start -= 1;
2178 }
2179
2180 name_ranges.push(start..end);
2181 }
2182
2183 let mut offset = range.start;
2184 chunks.seek(offset);
2185 while let Some(mut chunk) = chunks.next() {
2186 if chunk.text.len() > range.end - offset {
2187 chunk.text = &chunk.text[0..(range.end - offset)];
2188 offset = range.end;
2189 } else {
2190 offset += chunk.text.len();
2191 }
2192 if let Some(style) = chunk.highlight_style {
2193 let start = text.len();
2194 let end = start + chunk.text.len();
2195 highlight_ranges.push((start..end, style));
2196 }
2197 text.push_str(chunk.text);
2198 if offset >= range.end {
2199 break;
2200 }
2201 }
2202 }
2203
2204 while stack.last().map_or(false, |prev_range| {
2205 !prev_range.contains(&range.start) || !prev_range.contains(&range.end)
2206 }) {
2207 stack.pop();
2208 }
2209 stack.push(range.clone());
2210
2211 Some(OutlineItem {
2212 depth: stack.len() - 1,
2213 range: self.anchor_after(range.start)..self.anchor_before(range.end),
2214 text,
2215 highlight_ranges,
2216 name_ranges,
2217 })
2218 })
2219 .collect::<Vec<_>>();
2220
2221 if items.is_empty() {
2222 None
2223 } else {
2224 Some(Outline::new(items))
2225 }
2226 }
2227
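    // Return the byte ranges of the open and close brackets of the innermost bracket
    // pair enclosing `range`, according to the grammar's brackets query.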
2228 pub fn enclosing_bracket_ranges<T: ToOffset>(
2229 &self,
2230 range: Range<T>,
2231 ) -> Option<(Range<usize>, Range<usize>)> {
2232 let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
2233 let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
2234 let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
2235
2236 // Find bracket pairs that *inclusively* contain the given range.
2237 let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
2238 let mut cursor = QueryCursorHandle::new();
2239 let matches = cursor.set_byte_range(range).matches(
2240 &grammar.brackets_query,
2241 tree.root_node(),
2242 TextProvider(self.as_rope()),
2243 );
2244
2245 // Get the ranges of the innermost pair of brackets.
2246 matches
2247 .filter_map(|mat| {
2248 let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
2249 let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
2250 Some((open.byte_range(), close.byte_range()))
2251 })
2252 .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
2253 }
2254
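    // For every other replica with a non-empty selection set, yield the selections
    // that intersect `range`.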
2261 pub fn remote_selections_in_range<'a>(
2262 &'a self,
2263 range: Range<Anchor>,
2264 ) -> impl 'a + Iterator<Item = (ReplicaId, impl 'a + Iterator<Item = &'a Selection<Anchor>>)>
2265 {
2266 self.remote_selections
2267 .iter()
2268 .filter(|(replica_id, set)| {
2269 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2270 })
2271 .map(move |(replica_id, set)| {
2272 let start_ix = match set.selections.binary_search_by(|probe| {
2273 probe
2274 .end
2275 .cmp(&range.start, self)
2276 .unwrap()
2277 .then(Ordering::Greater)
2278 }) {
2279 Ok(ix) | Err(ix) => ix,
2280 };
2281 let end_ix = match set.selections.binary_search_by(|probe| {
2282 probe
2283 .start
2284 .cmp(&range.end, self)
2285 .unwrap()
2286 .then(Ordering::Less)
2287 }) {
2288 Ok(ix) | Err(ix) => ix,
2289 };
2290
2291 (*replica_id, set.selections[start_ix..end_ix].iter())
2292 })
2293 }
2294
2295 pub fn diagnostics_in_range<'a, T, O>(
2296 &'a self,
2297 search_range: Range<T>,
2298 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2299 where
2300 T: 'a + Clone + ToOffset,
2301 O: 'a + FromAnchor,
2302 {
2303 self.diagnostics.range(search_range.clone(), self, true)
2304 }
2305
2306 pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
2307 let mut groups = Vec::new();
2308 self.diagnostics.groups(&mut groups, self);
2309 groups
2310 }
2311
2312 pub fn diagnostic_group<'a, O>(
2313 &'a self,
2314 group_id: usize,
2315 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2316 where
2317 O: 'a + FromAnchor,
2318 {
2319 self.diagnostics.group(group_id, self)
2320 }
2321
2322 pub fn diagnostics_update_count(&self) -> usize {
2323 self.diagnostics_update_count
2324 }
2325
2326 pub fn parse_count(&self) -> usize {
2327 self.parse_count
2328 }
2329
2330 pub fn selections_update_count(&self) -> usize {
2331 self.selections_update_count
2332 }
2333}
2334
2335impl Clone for BufferSnapshot {
2336 fn clone(&self) -> Self {
2337 Self {
2338 text: self.text.clone(),
2339 tree: self.tree.clone(),
2340 remote_selections: self.remote_selections.clone(),
2341 diagnostics: self.diagnostics.clone(),
2342 selections_update_count: self.selections_update_count,
2343 diagnostics_update_count: self.diagnostics_update_count,
2344 is_parsing: self.is_parsing,
2345 language: self.language.clone(),
2346 parse_count: self.parse_count,
2347 }
2348 }
2349}
2350
2351impl Deref for BufferSnapshot {
2352 type Target = text::BufferSnapshot;
2353
2354 fn deref(&self) -> &Self::Target {
2355 &self.text
2356 }
2357}
2358
2359impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
2360 type I = ByteChunks<'a>;
2361
2362 fn text(&mut self, node: tree_sitter::Node) -> Self::I {
2363 ByteChunks(self.0.chunks_in_range(node.byte_range()))
2364 }
2365}
2366
2367struct ByteChunks<'a>(rope::Chunks<'a>);
2368
2369impl<'a> Iterator for ByteChunks<'a> {
2370 type Item = &'a [u8];
2371
2372 fn next(&mut self) -> Option<Self::Item> {
2373 self.0.next().map(str::as_bytes)
2374 }
2375}
2376
2377unsafe impl<'a> Send for BufferChunks<'a> {}
2378
2379impl<'a> BufferChunks<'a> {
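    // Reposition the iterator at `offset`, dropping any highlight captures that end
    // at or before the new position, applying a pending capture if it already covers
    // the offset, and narrowing the capture query to the remaining range.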
2380 pub fn seek(&mut self, offset: usize) {
2381 self.range.start = offset;
2382 self.chunks.seek(self.range.start);
2383 if let Some(highlights) = self.highlights.as_mut() {
2384 highlights
2385 .stack
2386 .retain(|(end_offset, _)| *end_offset > offset);
2387 if let Some((mat, capture_ix)) = &highlights.next_capture {
2388 let capture = mat.captures[*capture_ix as usize];
2389 if offset >= capture.node.start_byte() {
2390 let next_capture_end = capture.node.end_byte();
2391 if offset < next_capture_end {
2392 highlights.stack.push((
2393 next_capture_end,
2394 highlights.highlight_map.get(capture.index),
2395 ));
2396 }
2397 highlights.next_capture.take();
2398 }
2399 }
2400 highlights.captures.set_byte_range(self.range.clone());
2401 }
2402 }
2403
2404 pub fn offset(&self) -> usize {
2405 self.range.start
2406 }
2407
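    // Track how many diagnostics of each severity overlap the current position: a
    // start endpoint increments the depth for its severity and an end endpoint
    // decrements it. The most severe level with a non-zero depth is reported on each
    // emitted chunk.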
2408 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2409 let depth = match endpoint.severity {
2410 DiagnosticSeverity::ERROR => &mut self.error_depth,
2411 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2412 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2413 DiagnosticSeverity::HINT => &mut self.hint_depth,
2414 _ => return,
2415 };
2416 if endpoint.is_start {
2417 *depth += 1;
2418 } else {
2419 *depth -= 1;
2420 }
2421 }
2422
2423 fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
2424 if self.error_depth > 0 {
2425 Some(DiagnosticSeverity::ERROR)
2426 } else if self.warning_depth > 0 {
2427 Some(DiagnosticSeverity::WARNING)
2428 } else if self.information_depth > 0 {
2429 Some(DiagnosticSeverity::INFORMATION)
2430 } else if self.hint_depth > 0 {
2431 Some(DiagnosticSeverity::HINT)
2432 } else {
2433 None
2434 }
2435 }
2436}
2437
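// Emit the buffer's text as chunks whose boundaries fall on highlight-capture and
// diagnostic endpoints, so that each chunk carries at most one highlight style and
// one diagnostic severity.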
2438impl<'a> Iterator for BufferChunks<'a> {
2439 type Item = Chunk<'a>;
2440
2441 fn next(&mut self) -> Option<Self::Item> {
2442 let mut next_capture_start = usize::MAX;
2443 let mut next_diagnostic_endpoint = usize::MAX;
2444
2445 if let Some(highlights) = self.highlights.as_mut() {
2446 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2447 if *parent_capture_end <= self.range.start {
2448 highlights.stack.pop();
2449 } else {
2450 break;
2451 }
2452 }
2453
2454 if highlights.next_capture.is_none() {
2455 highlights.next_capture = highlights.captures.next();
2456 }
2457
2458 while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
2459 let capture = mat.captures[*capture_ix as usize];
2460 if self.range.start < capture.node.start_byte() {
2461 next_capture_start = capture.node.start_byte();
2462 break;
2463 } else {
2464 let highlight_id = highlights.highlight_map.get(capture.index);
2465 highlights
2466 .stack
2467 .push((capture.node.end_byte(), highlight_id));
2468 highlights.next_capture = highlights.captures.next();
2469 }
2470 }
2471 }
2472
2473 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
2474 if endpoint.offset <= self.range.start {
2475 self.update_diagnostic_depths(endpoint);
2476 self.diagnostic_endpoints.next();
2477 } else {
2478 next_diagnostic_endpoint = endpoint.offset;
2479 break;
2480 }
2481 }
2482
2483 if let Some(chunk) = self.chunks.peek() {
2484 let chunk_start = self.range.start;
2485 let mut chunk_end = (self.chunks.offset() + chunk.len())
2486 .min(next_capture_start)
2487 .min(next_diagnostic_endpoint);
2488 let mut highlight_style = None;
2489 if let Some(highlights) = self.highlights.as_ref() {
2490 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
2491 chunk_end = chunk_end.min(*parent_capture_end);
2492 highlight_style = parent_highlight_id.style(highlights.theme);
2493 }
2494 }
2495
2496 let slice =
2497 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2498 self.range.start = chunk_end;
2499 if self.range.start == self.chunks.offset() + chunk.len() {
2500 self.chunks.next().unwrap();
2501 }
2502
2503 Some(Chunk {
2504 text: slice,
2505 highlight_style,
2506 diagnostic: self.current_diagnostic_severity(),
2507 })
2508 } else {
2509 None
2510 }
2511 }
2512}
2513
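// Query cursors are pooled in `QUERY_CURSORS` so they can be reused: `new` takes a
// cursor from the pool when one is available, and `drop` resets the cursor's byte
// and point ranges before returning it to the pool.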
2514impl QueryCursorHandle {
2515 pub(crate) fn new() -> Self {
2516 QueryCursorHandle(Some(
2517 QUERY_CURSORS
2518 .lock()
2519 .pop()
                .unwrap_or_else(QueryCursor::new),
2521 ))
2522 }
2523}
2524
2525impl Deref for QueryCursorHandle {
2526 type Target = QueryCursor;
2527
2528 fn deref(&self) -> &Self::Target {
2529 self.0.as_ref().unwrap()
2530 }
2531}
2532
2533impl DerefMut for QueryCursorHandle {
2534 fn deref_mut(&mut self) -> &mut Self::Target {
2535 self.0.as_mut().unwrap()
2536 }
2537}
2538
2539impl Drop for QueryCursorHandle {
2540 fn drop(&mut self) {
2541 let mut cursor = self.0.take().unwrap();
2542 cursor.set_byte_range(0..usize::MAX);
2543 cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
2544 QUERY_CURSORS.lock().push(cursor)
2545 }
2546}
2547
2548trait ToTreeSitterPoint {
2549 fn to_ts_point(self) -> tree_sitter::Point;
2550 fn from_ts_point(point: tree_sitter::Point) -> Self;
2551}
2552
2553impl ToTreeSitterPoint for Point {
2554 fn to_ts_point(self) -> tree_sitter::Point {
2555 tree_sitter::Point::new(self.row as usize, self.column as usize)
2556 }
2557
2558 fn from_ts_point(point: tree_sitter::Point) -> Self {
2559 Point::new(point.row as u32, point.column as u32)
2560 }
2561}
2562
2563impl operation_queue::Operation for Operation {
2564 fn lamport_timestamp(&self) -> clock::Lamport {
2565 match self {
2566 Operation::Buffer(_) => {
2567 unreachable!("buffer operations should never be deferred at this layer")
2568 }
2569 Operation::UpdateDiagnostics {
2570 lamport_timestamp, ..
2571 }
2572 | Operation::UpdateSelections {
2573 lamport_timestamp, ..
2574 } => *lamport_timestamp,
2575 Operation::UpdateCompletionTriggers { .. } => {
2576 unreachable!("updating completion triggers should never be deferred")
2577 }
2578 }
2579 }
2580}
2581
2582impl Default for Diagnostic {
2583 fn default() -> Self {
2584 Self {
2585 code: Default::default(),
2586 severity: DiagnosticSeverity::ERROR,
2587 message: Default::default(),
2588 group_id: Default::default(),
2589 is_primary: Default::default(),
2590 is_valid: true,
2591 is_disk_based: false,
2592 }
2593 }
2594}
2595
2596impl<T> Completion<T> {
2597 pub fn label(&self) -> &str {
2598 &self.lsp_completion.label
2599 }
2600
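    // The subrange of the label used for filtering: the position of the LSP
    // `filter_text` within the label when it occurs there, otherwise the whole label.
    // For example, a label of "foo_bar" with a filter text of "bar" yields 4..7.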
2601 pub fn filter_range(&self) -> Range<usize> {
2602 if let Some(filter_text) = self.lsp_completion.filter_text.as_deref() {
2603 if let Some(start) = self.label().find(filter_text) {
2604 start..start + filter_text.len()
2605 } else {
2606 0..self.label().len()
2607 }
2608 } else {
2609 0..self.label().len()
2610 }
2611 }
2612
2613 pub fn sort_key(&self) -> (usize, &str) {
2614 let kind_key = match self.lsp_completion.kind {
2615 Some(lsp::CompletionItemKind::VARIABLE) => 0,
2616 _ => 1,
2617 };
2618 (kind_key, &self.label()[self.filter_range()])
2619 }
2620
2621 pub fn is_snippet(&self) -> bool {
2622 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
2623 }
2624}
2625
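// Coalesce a sequence of row numbers into contiguous ranges, starting a new range
// whenever the sequence jumps or the current range reaches `max_len` rows. For
// example, the values [1, 2, 3, 5, 6] with a `max_len` of 2 yield 1..3, 3..4, and
// 5..7.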
2626pub fn contiguous_ranges(
    values: impl IntoIterator<Item = u32>,
2628 max_len: usize,
2629) -> impl Iterator<Item = Range<u32>> {
2630 let mut values = values.into_iter();
2631 let mut current_range: Option<Range<u32>> = None;
2632 std::iter::from_fn(move || loop {
2633 if let Some(value) = values.next() {
2634 if let Some(range) = &mut current_range {
2635 if value == range.end && range.len() < max_len {
2636 range.end += 1;
2637 continue;
2638 }
2639 }
2640
2641 let prev_range = current_range.clone();
2642 current_range = Some(value..(value + 1));
2643 if prev_range.is_some() {
2644 return prev_range;
2645 }
2646 } else {
2647 return current_range.take();
2648 }
2649 })
2650}