1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
5 PLAIN_TEXT,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 outline::OutlineItem,
10 range_from_lsp, CodeLabel, Outline, ToLspPosition,
11};
12use anyhow::{anyhow, Result};
13use clock::ReplicaId;
14use futures::FutureExt as _;
15use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task};
16use lazy_static::lazy_static;
17use lsp::LanguageServer;
18use parking_lot::Mutex;
19use postage::{prelude::Stream, sink::Sink, watch};
20use similar::{ChangeTag, TextDiff};
21use smol::future::yield_now;
22use std::{
23 any::Any,
24 cmp::{self, Ordering},
25 collections::{BTreeMap, HashMap},
26 ffi::OsString,
27 future::Future,
28 iter::{Iterator, Peekable},
29 ops::{Deref, DerefMut, Range, Sub},
30 path::{Path, PathBuf},
31 str,
32 sync::Arc,
33 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
34 vec,
35};
36use sum_tree::TreeMap;
37use text::{operation_queue::OperationQueue, rope::TextDimension};
38pub use text::{Buffer as TextBuffer, Operation as _, *};
39use theme::SyntaxTheme;
40use tree_sitter::{InputEdit, QueryCursor, Tree};
41use util::{post_inc, TryFutureExt as _};
42
43#[cfg(any(test, feature = "test-support"))]
44pub use tree_sitter_rust;
45
46pub use lsp::DiagnosticSeverity;
47
48lazy_static! {
49 static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
50}
51
52// TODO - Make this configurable
53const INDENT_SIZE: u32 = 4;
54
55pub struct Buffer {
56 text: TextBuffer,
57 file: Option<Box<dyn File>>,
58 saved_version: clock::Global,
59 saved_mtime: SystemTime,
60 language: Option<Arc<Language>>,
61 autoindent_requests: Vec<Arc<AutoindentRequest>>,
62 pending_autoindent: Option<Task<()>>,
63 sync_parse_timeout: Duration,
64 syntax_tree: Mutex<Option<SyntaxTree>>,
65 parsing_in_background: bool,
66 parse_count: usize,
67 diagnostics: DiagnosticSet,
68 remote_selections: TreeMap<ReplicaId, SelectionSet>,
69 selections_update_count: usize,
70 diagnostics_update_count: usize,
71 diagnostics_timestamp: clock::Lamport,
72 file_update_count: usize,
73 language_server: Option<LanguageServerState>,
74 completion_triggers: Vec<String>,
75 deferred_ops: OperationQueue<Operation>,
76}
77
78pub struct BufferSnapshot {
79 text: text::BufferSnapshot,
80 tree: Option<Tree>,
81 path: Option<Arc<Path>>,
82 diagnostics: DiagnosticSet,
83 diagnostics_update_count: usize,
84 file_update_count: usize,
85 remote_selections: TreeMap<ReplicaId, SelectionSet>,
86 selections_update_count: usize,
87 is_parsing: bool,
88 language: Option<Arc<Language>>,
89 parse_count: usize,
90}
91
92#[derive(Clone, Debug)]
93struct SelectionSet {
94 selections: Arc<[Selection<Anchor>]>,
95 lamport_timestamp: clock::Lamport,
96}
97
98#[derive(Clone, Debug, PartialEq, Eq)]
99pub struct GroupId {
100 source: Arc<str>,
101 id: usize,
102}
103
104#[derive(Clone, Debug, PartialEq, Eq)]
105pub struct Diagnostic {
106 pub code: Option<String>,
107 pub severity: DiagnosticSeverity,
108 pub message: String,
109 pub group_id: usize,
110 pub is_valid: bool,
111 pub is_primary: bool,
112 pub is_disk_based: bool,
113}
114
115#[derive(Clone, Debug)]
116pub struct Completion {
117 pub old_range: Range<Anchor>,
118 pub new_text: String,
119 pub label: CodeLabel,
120 pub lsp_completion: lsp::CompletionItem,
121}
122
123#[derive(Clone, Debug)]
124pub struct CodeAction {
125 pub range: Range<Anchor>,
126 pub lsp_action: lsp::CodeAction,
127}
128
129struct LanguageServerState {
130 server: Arc<LanguageServer>,
131 latest_snapshot: watch::Sender<LanguageServerSnapshot>,
132 pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
133 next_version: usize,
134 _maintain_server: Task<Option<()>>,
135}
136
137#[derive(Clone)]
138struct LanguageServerSnapshot {
139 buffer_snapshot: text::BufferSnapshot,
140 version: usize,
141 path: Arc<Path>,
142}
143
144#[derive(Clone, Debug, PartialEq, Eq)]
145pub enum Operation {
146 Buffer(text::Operation),
147 UpdateDiagnostics {
148 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
149 lamport_timestamp: clock::Lamport,
150 },
151 UpdateSelections {
152 selections: Arc<[Selection<Anchor>]>,
153 lamport_timestamp: clock::Lamport,
154 },
155 UpdateCompletionTriggers {
156 triggers: Vec<String>,
157 lamport_timestamp: clock::Lamport,
158 },
159}
160
161#[derive(Clone, Debug, PartialEq, Eq)]
162pub enum Event {
163 Operation(Operation),
164 Edited,
165 Dirtied,
166 Saved,
167 FileHandleChanged,
168 Reloaded,
169 Reparsed,
170 DiagnosticsUpdated,
171 Closed,
172}
173
174pub trait File {
175 fn as_local(&self) -> Option<&dyn LocalFile>;
176
177 fn is_local(&self) -> bool {
178 self.as_local().is_some()
179 }
180
181 fn mtime(&self) -> SystemTime;
182
183 /// Returns the path of this file relative to the worktree's root directory.
184 fn path(&self) -> &Arc<Path>;
185
186 /// Returns the path of this file relative to the worktree's parent directory (this means it
187 /// includes the name of the worktree's root folder).
188 fn full_path(&self, cx: &AppContext) -> PathBuf;
189
190 /// Returns the last component of this handle's absolute path. If this handle refers to the root
191 /// of its worktree, then this method will return the name of the worktree itself.
192 fn file_name(&self, cx: &AppContext) -> OsString;
193
194 fn is_deleted(&self) -> bool;
195
196 fn save(
197 &self,
198 buffer_id: u64,
199 text: Rope,
200 version: clock::Global,
201 cx: &mut MutableAppContext,
202 ) -> Task<Result<(clock::Global, SystemTime)>>;
203
204 fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);
205
206 fn as_any(&self) -> &dyn Any;
207
208 fn to_proto(&self) -> rpc::proto::File;
209}
210
211pub trait LocalFile: File {
212 /// Returns the absolute path of this file.
213 fn abs_path(&self, cx: &AppContext) -> PathBuf;
214
215 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
216
217 fn buffer_reloaded(
218 &self,
219 buffer_id: u64,
220 version: &clock::Global,
221 mtime: SystemTime,
222 cx: &mut MutableAppContext,
223 );
224}
225
226#[cfg(any(test, feature = "test-support"))]
227pub struct FakeFile {
228 pub path: Arc<Path>,
229}
230
231#[cfg(any(test, feature = "test-support"))]
232impl FakeFile {
233 pub fn new(path: impl AsRef<Path>) -> Self {
234 Self {
235 path: path.as_ref().into(),
236 }
237 }
238}
239
240#[cfg(any(test, feature = "test-support"))]
241impl File for FakeFile {
242 fn as_local(&self) -> Option<&dyn LocalFile> {
243 Some(self)
244 }
245
246 fn mtime(&self) -> SystemTime {
247 SystemTime::UNIX_EPOCH
248 }
249
250 fn path(&self) -> &Arc<Path> {
251 &self.path
252 }
253
254 fn full_path(&self, _: &AppContext) -> PathBuf {
255 self.path.to_path_buf()
256 }
257
258 fn file_name(&self, _: &AppContext) -> OsString {
259 self.path.file_name().unwrap().to_os_string()
260 }
261
262 fn is_deleted(&self) -> bool {
263 false
264 }
265
266 fn save(
267 &self,
268 _: u64,
269 _: Rope,
270 _: clock::Global,
271 cx: &mut MutableAppContext,
272 ) -> Task<Result<(clock::Global, SystemTime)>> {
273 cx.spawn(|_| async move { Ok((Default::default(), SystemTime::UNIX_EPOCH)) })
274 }
275
276 fn buffer_removed(&self, _: u64, _: &mut MutableAppContext) {}
277
278 fn as_any(&self) -> &dyn Any {
279 self
280 }
281
282 fn to_proto(&self) -> rpc::proto::File {
283 unimplemented!()
284 }
285}
286
287#[cfg(any(test, feature = "test-support"))]
288impl LocalFile for FakeFile {
289 fn abs_path(&self, _: &AppContext) -> PathBuf {
290 self.path.to_path_buf()
291 }
292
293 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
294 cx.background().spawn(async move { Ok(Default::default()) })
295 }
296
297 fn buffer_reloaded(&self, _: u64, _: &clock::Global, _: SystemTime, _: &mut MutableAppContext) {
298 }
299}
300
301pub(crate) struct QueryCursorHandle(Option<QueryCursor>);
302
303#[derive(Clone)]
304struct SyntaxTree {
305 tree: Tree,
306 version: clock::Global,
307}
308
309#[derive(Clone)]
310struct AutoindentRequest {
311 before_edit: BufferSnapshot,
312 edited: Vec<Anchor>,
313 inserted: Option<Vec<Range<Anchor>>>,
314}
315
316#[derive(Debug)]
317struct IndentSuggestion {
318 basis_row: u32,
319 indent: bool,
320}
321
322pub(crate) struct TextProvider<'a>(pub(crate) &'a Rope);
323
324struct BufferChunkHighlights<'a> {
325 captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
326 next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
327 stack: Vec<(usize, HighlightId)>,
328 highlight_map: HighlightMap,
329 _query_cursor: QueryCursorHandle,
330}
331
332pub struct BufferChunks<'a> {
333 range: Range<usize>,
334 chunks: rope::Chunks<'a>,
335 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
336 error_depth: usize,
337 warning_depth: usize,
338 information_depth: usize,
339 hint_depth: usize,
340 highlights: Option<BufferChunkHighlights<'a>>,
341}
342
343#[derive(Clone, Copy, Debug, Default)]
344pub struct Chunk<'a> {
345 pub text: &'a str,
346 pub highlight_id: Option<HighlightId>,
347 pub diagnostic: Option<DiagnosticSeverity>,
348}
349
350pub(crate) struct Diff {
351 base_version: clock::Global,
352 new_text: Arc<str>,
353 changes: Vec<(ChangeTag, usize)>,
354 start_offset: usize,
355}
356
357#[derive(Clone, Copy)]
358pub(crate) struct DiagnosticEndpoint {
359 offset: usize,
360 is_start: bool,
361 severity: DiagnosticSeverity,
362}
363
364#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
365pub enum CharKind {
366 Newline,
367 Punctuation,
368 Whitespace,
369 Word,
370}
371
372impl Buffer {
373 pub fn new<T: Into<Arc<str>>>(
374 replica_id: ReplicaId,
375 base_text: T,
376 cx: &mut ModelContext<Self>,
377 ) -> Self {
378 Self::build(
379 TextBuffer::new(
380 replica_id,
381 cx.model_id() as u64,
382 History::new(base_text.into()),
383 ),
384 None,
385 )
386 }
387
388 pub fn from_file<T: Into<Arc<str>>>(
389 replica_id: ReplicaId,
390 base_text: T,
391 file: Box<dyn File>,
392 cx: &mut ModelContext<Self>,
393 ) -> Self {
394 Self::build(
395 TextBuffer::new(
396 replica_id,
397 cx.model_id() as u64,
398 History::new(base_text.into()),
399 ),
400 Some(file),
401 )
402 }
403
404 pub fn from_proto(
405 replica_id: ReplicaId,
406 message: proto::BufferState,
407 file: Option<Box<dyn File>>,
408 cx: &mut ModelContext<Self>,
409 ) -> Result<Self> {
410 let buffer = TextBuffer::new(
411 replica_id,
412 message.id,
413 History::new(Arc::from(message.base_text)),
414 );
415 let mut this = Self::build(buffer, file);
416 let ops = message
417 .operations
418 .into_iter()
419 .map(proto::deserialize_operation)
420 .collect::<Result<Vec<_>>>()?;
421 this.apply_ops(ops, cx)?;
422
423 for selection_set in message.selections {
424 let lamport_timestamp = clock::Lamport {
425 replica_id: selection_set.replica_id as ReplicaId,
426 value: selection_set.lamport_timestamp,
427 };
428 this.remote_selections.insert(
429 selection_set.replica_id as ReplicaId,
430 SelectionSet {
431 selections: proto::deserialize_selections(selection_set.selections),
432 lamport_timestamp,
433 },
434 );
435 this.text.lamport_clock.observe(lamport_timestamp);
436 }
437 let snapshot = this.snapshot();
438 let entries = proto::deserialize_diagnostics(message.diagnostics);
439 this.apply_diagnostic_update(
440 DiagnosticSet::from_sorted_entries(entries.iter().cloned(), &snapshot),
441 clock::Lamport {
442 replica_id: 0,
443 value: message.diagnostics_timestamp,
444 },
445 cx,
446 );
447
448 this.completion_triggers = message.completion_triggers;
449
450 Ok(this)
451 }
452
453 pub fn to_proto(&self) -> proto::BufferState {
454 let mut operations = self
455 .text
456 .history()
457 .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone())))
458 .chain(self.deferred_ops.iter().map(proto::serialize_operation))
459 .collect::<Vec<_>>();
460 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
461 proto::BufferState {
462 id: self.remote_id(),
463 file: self.file.as_ref().map(|f| f.to_proto()),
464 base_text: self.base_text().to_string(),
465 operations,
466 selections: self
467 .remote_selections
468 .iter()
469 .map(|(replica_id, set)| proto::SelectionSet {
470 replica_id: *replica_id as u32,
471 selections: proto::serialize_selections(&set.selections),
472 lamport_timestamp: set.lamport_timestamp.value,
473 })
474 .collect(),
475 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
476 diagnostics_timestamp: self.diagnostics_timestamp.value,
477 completion_triggers: self.completion_triggers.clone(),
478 }
479 }
480
481 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
482 self.set_language(Some(language), cx);
483 self
484 }
485
486 pub fn with_language_server(
487 mut self,
488 server: Arc<LanguageServer>,
489 cx: &mut ModelContext<Self>,
490 ) -> Self {
491 self.set_language_server(Some(server), cx);
492 self
493 }
494
495 fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
496 let saved_mtime;
497 if let Some(file) = file.as_ref() {
498 saved_mtime = file.mtime();
499 } else {
500 saved_mtime = UNIX_EPOCH;
501 }
502
503 Self {
504 saved_mtime,
505 saved_version: buffer.version(),
506 text: buffer,
507 file,
508 syntax_tree: Mutex::new(None),
509 parsing_in_background: false,
510 parse_count: 0,
511 sync_parse_timeout: Duration::from_millis(1),
512 autoindent_requests: Default::default(),
513 pending_autoindent: Default::default(),
514 language: None,
515 remote_selections: Default::default(),
516 selections_update_count: 0,
517 diagnostics: Default::default(),
518 diagnostics_update_count: 0,
519 diagnostics_timestamp: Default::default(),
520 file_update_count: 0,
521 language_server: None,
522 completion_triggers: Default::default(),
523 deferred_ops: OperationQueue::new(),
524 }
525 }
526
527 pub fn snapshot(&self) -> BufferSnapshot {
528 BufferSnapshot {
529 text: self.text.snapshot(),
530 tree: self.syntax_tree(),
531 path: self.file.as_ref().map(|f| f.path().clone()),
532 remote_selections: self.remote_selections.clone(),
533 diagnostics: self.diagnostics.clone(),
534 diagnostics_update_count: self.diagnostics_update_count,
535 file_update_count: self.file_update_count,
536 is_parsing: self.parsing_in_background,
537 language: self.language.clone(),
538 parse_count: self.parse_count,
539 selections_update_count: self.selections_update_count,
540 }
541 }
542
543 pub fn file(&self) -> Option<&dyn File> {
544 self.file.as_deref()
545 }
546
547 pub fn save(
548 &mut self,
549 cx: &mut ModelContext<Self>,
550 ) -> Task<Result<(clock::Global, SystemTime)>> {
551 let file = if let Some(file) = self.file.as_ref() {
552 file
553 } else {
554 return Task::ready(Err(anyhow!("buffer has no file")));
555 };
556 let text = self.as_rope().clone();
557 let version = self.version();
558 let save = file.save(self.remote_id(), text, version, cx.as_mut());
559 cx.spawn(|this, mut cx| async move {
560 let (version, mtime) = save.await?;
561 this.update(&mut cx, |this, cx| {
562 this.did_save(version.clone(), mtime, None, cx);
563 });
564 Ok((version, mtime))
565 })
566 }
567
568 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
569 self.language = language;
570 self.reparse(cx);
571 }
572
573 pub fn set_language_server(
574 &mut self,
575 language_server: Option<Arc<lsp::LanguageServer>>,
576 cx: &mut ModelContext<Self>,
577 ) {
578 self.language_server = if let Some((server, file)) =
579 language_server.zip(self.file.as_ref().and_then(|f| f.as_local()))
580 {
581 let initial_snapshot = LanguageServerSnapshot {
582 buffer_snapshot: self.text.snapshot(),
583 version: 0,
584 path: file.abs_path(cx).into(),
585 };
586 let (latest_snapshot_tx, mut latest_snapshot_rx) =
587 watch::channel_with::<LanguageServerSnapshot>(initial_snapshot.clone());
588
589 Some(LanguageServerState {
590 latest_snapshot: latest_snapshot_tx,
591 pending_snapshots: BTreeMap::from_iter([(0, initial_snapshot)]),
592 next_version: 1,
593 server: server.clone(),
594 _maintain_server: cx.spawn_weak(|this, mut cx| async move {
595 let capabilities = server.capabilities().await.or_else(|| {
596 log::info!("language server exited");
597 if let Some(this) = this.upgrade(&cx) {
598 this.update(&mut cx, |this, _| this.language_server = None);
599 }
600 None
601 })?;
602
603 let triggers = capabilities
604 .completion_provider
605 .and_then(|c| c.trigger_characters)
606 .unwrap_or_default();
607 this.upgrade(&cx)?.update(&mut cx, |this, cx| {
608 let lamport_timestamp = this.text.lamport_clock.tick();
609 this.completion_triggers = triggers.clone();
610 this.send_operation(
611 Operation::UpdateCompletionTriggers {
612 triggers,
613 lamport_timestamp,
614 },
615 cx,
616 );
617 cx.notify();
618 });
619
620 let maintain_changes = cx.background().spawn(async move {
621 let initial_snapshot =
622 latest_snapshot_rx.recv().await.ok_or_else(|| {
623 anyhow!("buffer dropped before sending DidOpenTextDocument")
624 })?;
625 server
626 .notify::<lsp::notification::DidOpenTextDocument>(
627 lsp::DidOpenTextDocumentParams {
628 text_document: lsp::TextDocumentItem::new(
629 lsp::Url::from_file_path(initial_snapshot.path).unwrap(),
630 Default::default(),
631 initial_snapshot.version as i32,
632 initial_snapshot.buffer_snapshot.text(),
633 ),
634 },
635 )
636 .await?;
637
638 let mut prev_version = initial_snapshot.buffer_snapshot.version().clone();
639 while let Some(snapshot) = latest_snapshot_rx.recv().await {
640 let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
641 let buffer_snapshot = snapshot.buffer_snapshot.clone();
642 let content_changes = buffer_snapshot
643 .edits_since::<(PointUtf16, usize)>(&prev_version)
644 .map(|edit| {
645 let edit_start = edit.new.start.0;
646 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
647 let new_text = buffer_snapshot
648 .text_for_range(edit.new.start.1..edit.new.end.1)
649 .collect();
650 lsp::TextDocumentContentChangeEvent {
651 range: Some(lsp::Range::new(
652 edit_start.to_lsp_position(),
653 edit_end.to_lsp_position(),
654 )),
655 range_length: None,
656 text: new_text,
657 }
658 })
659 .collect();
660 let changes = lsp::DidChangeTextDocumentParams {
661 text_document: lsp::VersionedTextDocumentIdentifier::new(
662 uri,
663 snapshot.version as i32,
664 ),
665 content_changes,
666 };
667 server
668 .notify::<lsp::notification::DidChangeTextDocument>(changes)
669 .await?;
670
671 prev_version = snapshot.buffer_snapshot.version().clone();
672 }
673
674 Ok::<_, anyhow::Error>(())
675 });
676
677 maintain_changes.log_err().await
678 }),
679 })
680 } else {
681 None
682 };
683 }
684
685 pub fn did_save(
686 &mut self,
687 version: clock::Global,
688 mtime: SystemTime,
689 new_file: Option<Box<dyn File>>,
690 cx: &mut ModelContext<Self>,
691 ) {
692 self.saved_mtime = mtime;
693 self.saved_version = version;
694 if let Some(new_file) = new_file {
695 self.file = Some(new_file);
696 self.file_update_count += 1;
697 }
698 if let Some((state, local_file)) = &self
699 .language_server
700 .as_ref()
701 .zip(self.file.as_ref().and_then(|f| f.as_local()))
702 {
703 cx.background()
704 .spawn(
705 state
706 .server
707 .notify::<lsp::notification::DidSaveTextDocument>(
708 lsp::DidSaveTextDocumentParams {
709 text_document: lsp::TextDocumentIdentifier {
710 uri: lsp::Url::from_file_path(local_file.abs_path(cx)).unwrap(),
711 },
712 text: None,
713 },
714 ),
715 )
716 .detach()
717 }
718 cx.emit(Event::Saved);
719 cx.notify();
720 }
721
722 pub fn did_reload(
723 &mut self,
724 version: clock::Global,
725 mtime: SystemTime,
726 cx: &mut ModelContext<Self>,
727 ) {
728 self.saved_mtime = mtime;
729 self.saved_version = version;
730 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
731 file.buffer_reloaded(self.remote_id(), &self.saved_version, self.saved_mtime, cx);
732 }
733 cx.emit(Event::Reloaded);
734 cx.notify();
735 }
736
737 pub fn file_updated(
738 &mut self,
739 new_file: Box<dyn File>,
740 cx: &mut ModelContext<Self>,
741 ) -> Task<()> {
742 let old_file = if let Some(file) = self.file.as_ref() {
743 file
744 } else {
745 return Task::ready(());
746 };
747 let mut file_changed = false;
748 let mut task = Task::ready(());
749
750 if new_file.path() != old_file.path() {
751 file_changed = true;
752 }
753
754 if new_file.is_deleted() {
755 if !old_file.is_deleted() {
756 file_changed = true;
757 if !self.is_dirty() {
758 cx.emit(Event::Dirtied);
759 }
760 }
761 } else {
762 let new_mtime = new_file.mtime();
763 if new_mtime != old_file.mtime() {
764 file_changed = true;
765
766 if !self.is_dirty() {
767 task = cx.spawn(|this, mut cx| {
768 async move {
769 let new_text = this.read_with(&cx, |this, cx| {
770 this.file
771 .as_ref()
772 .and_then(|file| file.as_local().map(|f| f.load(cx)))
773 });
774 if let Some(new_text) = new_text {
775 let new_text = new_text.await?;
776 let diff = this
777 .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
778 .await;
779 this.update(&mut cx, |this, cx| {
780 if this.apply_diff(diff, cx) {
781 this.did_reload(this.version(), new_mtime, cx);
782 }
783 });
784 }
785 Ok(())
786 }
787 .log_err()
788 .map(drop)
789 });
790 }
791 }
792 }
793
794 if file_changed {
795 self.file_update_count += 1;
796 cx.emit(Event::FileHandleChanged);
797 cx.notify();
798 }
799 self.file = Some(new_file);
800 task
801 }
802
803 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
804 cx.emit(Event::Closed);
805 }
806
807 pub fn language(&self) -> Option<&Arc<Language>> {
808 self.language.as_ref()
809 }
810
811 pub fn language_server(&self) -> Option<&Arc<LanguageServer>> {
812 self.language_server.as_ref().map(|state| &state.server)
813 }
814
815 pub fn parse_count(&self) -> usize {
816 self.parse_count
817 }
818
819 pub fn selections_update_count(&self) -> usize {
820 self.selections_update_count
821 }
822
823 pub fn diagnostics_update_count(&self) -> usize {
824 self.diagnostics_update_count
825 }
826
827 pub fn file_update_count(&self) -> usize {
828 self.file_update_count
829 }
830
831 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
832 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
833 self.interpolate_tree(syntax_tree);
834 Some(syntax_tree.tree.clone())
835 } else {
836 None
837 }
838 }
839
840 #[cfg(any(test, feature = "test-support"))]
841 pub fn is_parsing(&self) -> bool {
842 self.parsing_in_background
843 }
844
845 #[cfg(test)]
846 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
847 self.sync_parse_timeout = timeout;
848 }
849
850 fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
851 if self.parsing_in_background {
852 return false;
853 }
854
855 if let Some(grammar) = self.grammar().cloned() {
856 let old_tree = self.syntax_tree();
857 let text = self.as_rope().clone();
858 let parsed_version = self.version();
859 let parse_task = cx.background().spawn({
860 let grammar = grammar.clone();
861 async move { grammar.parse_text(&text, old_tree) }
862 });
863
864 match cx
865 .background()
866 .block_with_timeout(self.sync_parse_timeout, parse_task)
867 {
868 Ok(new_tree) => {
869 self.did_finish_parsing(new_tree, parsed_version, cx);
870 return true;
871 }
872 Err(parse_task) => {
873 self.parsing_in_background = true;
874 cx.spawn(move |this, mut cx| async move {
875 let new_tree = parse_task.await;
876 this.update(&mut cx, move |this, cx| {
877 let grammar_changed = this
878 .grammar()
879 .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
880 let parse_again =
881 this.version.changed_since(&parsed_version) || grammar_changed;
882 this.parsing_in_background = false;
883 this.did_finish_parsing(new_tree, parsed_version, cx);
884
885 if parse_again && this.reparse(cx) {
886 return;
887 }
888 });
889 })
890 .detach();
891 }
892 }
893 }
894 false
895 }
896
897 fn interpolate_tree(&self, tree: &mut SyntaxTree) {
898 for edit in self.edits_since::<(usize, Point)>(&tree.version) {
899 let (bytes, lines) = edit.flatten();
900 tree.tree.edit(&InputEdit {
901 start_byte: bytes.new.start,
902 old_end_byte: bytes.new.start + bytes.old.len(),
903 new_end_byte: bytes.new.end,
904 start_position: lines.new.start.to_ts_point(),
905 old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
906 .to_ts_point(),
907 new_end_position: lines.new.end.to_ts_point(),
908 });
909 }
910 tree.version = self.version();
911 }
912
913 fn did_finish_parsing(
914 &mut self,
915 tree: Tree,
916 version: clock::Global,
917 cx: &mut ModelContext<Self>,
918 ) {
919 self.parse_count += 1;
920 *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
921 self.request_autoindent(cx);
922 cx.emit(Event::Reparsed);
923 cx.notify();
924 }
925
926 pub fn update_diagnostics<T>(
927 &mut self,
928 mut diagnostics: Vec<DiagnosticEntry<T>>,
929 version: Option<i32>,
930 cx: &mut ModelContext<Self>,
931 ) -> Result<()>
932 where
933 T: Copy + Ord + TextDimension + Sub<Output = T> + Clip + ToPoint,
934 {
935 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
936 Ordering::Equal
937 .then_with(|| b.is_primary.cmp(&a.is_primary))
938 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
939 .then_with(|| a.severity.cmp(&b.severity))
940 .then_with(|| a.message.cmp(&b.message))
941 }
942
943 let version = version.map(|version| version as usize);
944 let content =
945 if let Some((version, language_server)) = version.zip(self.language_server.as_mut()) {
946 language_server.snapshot_for_version(version)?
947 } else {
948 self.deref()
949 };
950
951 diagnostics.sort_unstable_by(|a, b| {
952 Ordering::Equal
953 .then_with(|| a.range.start.cmp(&b.range.start))
954 .then_with(|| b.range.end.cmp(&a.range.end))
955 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
956 });
957
958 let mut sanitized_diagnostics = Vec::new();
959 let mut edits_since_save = content.edits_since::<T>(&self.saved_version).peekable();
960 let mut last_edit_old_end = T::default();
961 let mut last_edit_new_end = T::default();
962 'outer: for entry in diagnostics {
963 let mut start = entry.range.start;
964 let mut end = entry.range.end;
965
966 // Some diagnostics are based on files on disk instead of buffers'
967 // current contents. Adjust these diagnostics' ranges to reflect
968 // any unsaved edits.
969 if entry.diagnostic.is_disk_based {
970 while let Some(edit) = edits_since_save.peek() {
971 if edit.old.end <= start {
972 last_edit_old_end = edit.old.end;
973 last_edit_new_end = edit.new.end;
974 edits_since_save.next();
975 } else if edit.old.start <= end && edit.old.end >= start {
976 continue 'outer;
977 } else {
978 break;
979 }
980 }
981
982 let start_overshoot = start - last_edit_old_end;
983 start = last_edit_new_end;
984 start.add_assign(&start_overshoot);
985
986 let end_overshoot = end - last_edit_old_end;
987 end = last_edit_new_end;
988 end.add_assign(&end_overshoot);
989 }
990
991 let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content);
992 let mut range = range.start.to_point(content)..range.end.to_point(content);
993 // Expand empty ranges by one character
994 if range.start == range.end {
995 range.end.column += 1;
996 range.end = content.clip_point(range.end, Bias::Right);
997 if range.start == range.end && range.end.column > 0 {
998 range.start.column -= 1;
999 range.start = content.clip_point(range.start, Bias::Left);
1000 }
1001 }
1002
1003 sanitized_diagnostics.push(DiagnosticEntry {
1004 range,
1005 diagnostic: entry.diagnostic,
1006 });
1007 }
1008 drop(edits_since_save);
1009
1010 let set = DiagnosticSet::new(sanitized_diagnostics, content);
1011 let lamport_timestamp = self.text.lamport_clock.tick();
1012 self.apply_diagnostic_update(set.clone(), lamport_timestamp, cx);
1013
1014 let op = Operation::UpdateDiagnostics {
1015 diagnostics: set.iter().cloned().collect(),
1016 lamport_timestamp,
1017 };
1018 self.send_operation(op, cx);
1019 Ok(())
1020 }
1021
1022 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1023 if let Some(indent_columns) = self.compute_autoindents() {
1024 let indent_columns = cx.background().spawn(indent_columns);
1025 match cx
1026 .background()
1027 .block_with_timeout(Duration::from_micros(500), indent_columns)
1028 {
1029 Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
1030 Err(indent_columns) => {
1031 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1032 let indent_columns = indent_columns.await;
1033 this.update(&mut cx, |this, cx| {
1034 this.apply_autoindents(indent_columns, cx);
1035 });
1036 }));
1037 }
1038 }
1039 }
1040 }
1041
1042 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
1043 let max_rows_between_yields = 100;
1044 let snapshot = self.snapshot();
1045 if snapshot.language.is_none()
1046 || snapshot.tree.is_none()
1047 || self.autoindent_requests.is_empty()
1048 {
1049 return None;
1050 }
1051
1052 let autoindent_requests = self.autoindent_requests.clone();
1053 Some(async move {
1054 let mut indent_columns = BTreeMap::new();
1055 for request in autoindent_requests {
1056 let old_to_new_rows = request
1057 .edited
1058 .iter()
1059 .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
1060 .zip(
1061 request
1062 .edited
1063 .iter()
1064 .map(|anchor| anchor.summary::<Point>(&snapshot).row),
1065 )
1066 .collect::<BTreeMap<u32, u32>>();
1067
1068 let mut old_suggestions = HashMap::<u32, u32>::default();
1069 let old_edited_ranges =
1070 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1071 for old_edited_range in old_edited_ranges {
1072 let suggestions = request
1073 .before_edit
1074 .suggest_autoindents(old_edited_range.clone())
1075 .into_iter()
1076 .flatten();
1077 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1078 let indentation_basis = old_to_new_rows
1079 .get(&suggestion.basis_row)
1080 .and_then(|from_row| old_suggestions.get(from_row).copied())
1081 .unwrap_or_else(|| {
1082 request
1083 .before_edit
1084 .indent_column_for_line(suggestion.basis_row)
1085 });
1086 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1087 old_suggestions.insert(
1088 *old_to_new_rows.get(&old_row).unwrap(),
1089 indentation_basis + delta,
1090 );
1091 }
1092 yield_now().await;
1093 }
1094
1095 // At this point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the
1096 // buffer before the edit, but keyed by the row for these lines after the edits were applied.
1097 let new_edited_row_ranges =
1098 contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
1099 for new_edited_row_range in new_edited_row_ranges {
1100 let suggestions = snapshot
1101 .suggest_autoindents(new_edited_row_range.clone())
1102 .into_iter()
1103 .flatten();
1104 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1105 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1106 let new_indentation = indent_columns
1107 .get(&suggestion.basis_row)
1108 .copied()
1109 .unwrap_or_else(|| {
1110 snapshot.indent_column_for_line(suggestion.basis_row)
1111 })
1112 + delta;
1113 if old_suggestions
1114 .get(&new_row)
1115 .map_or(true, |old_indentation| new_indentation != *old_indentation)
1116 {
1117 indent_columns.insert(new_row, new_indentation);
1118 }
1119 }
1120 yield_now().await;
1121 }
1122
1123 if let Some(inserted) = request.inserted.as_ref() {
1124 let inserted_row_ranges = contiguous_ranges(
1125 inserted
1126 .iter()
1127 .map(|range| range.to_point(&snapshot))
1128 .flat_map(|range| range.start.row..range.end.row + 1),
1129 max_rows_between_yields,
1130 );
1131 for inserted_row_range in inserted_row_ranges {
1132 let suggestions = snapshot
1133 .suggest_autoindents(inserted_row_range.clone())
1134 .into_iter()
1135 .flatten();
1136 for (row, suggestion) in inserted_row_range.zip(suggestions) {
1137 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1138 let new_indentation = indent_columns
1139 .get(&suggestion.basis_row)
1140 .copied()
1141 .unwrap_or_else(|| {
1142 snapshot.indent_column_for_line(suggestion.basis_row)
1143 })
1144 + delta;
1145 indent_columns.insert(row, new_indentation);
1146 }
1147 yield_now().await;
1148 }
1149 }
1150 }
1151 indent_columns
1152 })
1153 }
1154
1155 fn apply_autoindents(
1156 &mut self,
1157 indent_columns: BTreeMap<u32, u32>,
1158 cx: &mut ModelContext<Self>,
1159 ) {
1160 self.autoindent_requests.clear();
1161 self.start_transaction();
1162 for (row, indent_column) in &indent_columns {
1163 self.set_indent_column_for_line(*row, *indent_column, cx);
1164 }
1165 self.end_transaction(cx);
1166 }
1167
1168 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
1169 let current_column = self.indent_column_for_line(row);
1170 if column > current_column {
1171 let offset = Point::new(row, 0).to_offset(&*self);
1172 self.edit(
1173 [offset..offset],
1174 " ".repeat((column - current_column) as usize),
1175 cx,
1176 );
1177 } else if column < current_column {
1178 self.edit(
1179 [Point::new(row, 0)..Point::new(row, current_column - column)],
1180 "",
1181 cx,
1182 );
1183 }
1184 }
1185
1186 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
1187 // TODO: it would be nice to not allocate here.
1188 let old_text = self.text();
1189 let base_version = self.version();
1190 cx.background().spawn(async move {
1191 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
1192 .iter_all_changes()
1193 .map(|c| (c.tag(), c.value().len()))
1194 .collect::<Vec<_>>();
1195 Diff {
1196 base_version,
1197 new_text,
1198 changes,
1199 start_offset: 0,
1200 }
1201 })
1202 }
1203
1204 pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
1205 if self.version == diff.base_version {
1206 self.start_transaction();
1207 let mut offset = diff.start_offset;
1208 for (tag, len) in diff.changes {
1209 let range = offset..(offset + len);
1210 match tag {
1211 ChangeTag::Equal => offset += len,
1212 ChangeTag::Delete => {
1213 self.edit([range], "", cx);
1214 }
1215 ChangeTag::Insert => {
1216 self.edit(
1217 [offset..offset],
1218 &diff.new_text
1219 [range.start - diff.start_offset..range.end - diff.start_offset],
1220 cx,
1221 );
1222 offset += len;
1223 }
1224 }
1225 }
1226 self.end_transaction(cx);
1227 true
1228 } else {
1229 false
1230 }
1231 }
1232
1233 pub fn is_dirty(&self) -> bool {
1234 !self.saved_version.observed_all(&self.version)
1235 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1236 }
1237
1238 pub fn has_conflict(&self) -> bool {
1239 !self.saved_version.observed_all(&self.version)
1240 && self
1241 .file
1242 .as_ref()
1243 .map_or(false, |file| file.mtime() > self.saved_mtime)
1244 }
1245
1246 pub fn subscribe(&mut self) -> Subscription {
1247 self.text.subscribe()
1248 }
1249
1250 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1251 self.start_transaction_at(Instant::now())
1252 }
1253
1254 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1255 self.text.start_transaction_at(now)
1256 }
1257
1258 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1259 self.end_transaction_at(Instant::now(), cx)
1260 }
1261
1262 pub fn end_transaction_at(
1263 &mut self,
1264 now: Instant,
1265 cx: &mut ModelContext<Self>,
1266 ) -> Option<TransactionId> {
1267 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1268 let was_dirty = start_version != self.saved_version;
1269 self.did_edit(&start_version, was_dirty, cx);
1270 Some(transaction_id)
1271 } else {
1272 None
1273 }
1274 }
1275
1276 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1277 self.text.push_transaction(transaction, now);
1278 }
1279
1280 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1281 self.text.finalize_last_transaction()
1282 }
1283
1284 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1285 self.text.forget_transaction(transaction_id);
1286 }
1287
1288 pub fn wait_for_edits(
1289 &mut self,
1290 edit_ids: impl IntoIterator<Item = clock::Local>,
1291 ) -> impl Future<Output = ()> {
1292 self.text.wait_for_edits(edit_ids)
1293 }
1294
1295 pub fn wait_for_anchors<'a>(
1296 &mut self,
1297 anchors: impl IntoIterator<Item = &'a Anchor>,
1298 ) -> impl Future<Output = ()> {
1299 self.text.wait_for_anchors(anchors)
1300 }
1301
1302 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = ()> {
1303 self.text.wait_for_version(version)
1304 }
1305
1306 pub fn set_active_selections(
1307 &mut self,
1308 selections: Arc<[Selection<Anchor>]>,
1309 cx: &mut ModelContext<Self>,
1310 ) {
1311 let lamport_timestamp = self.text.lamport_clock.tick();
1312 self.remote_selections.insert(
1313 self.text.replica_id(),
1314 SelectionSet {
1315 selections: selections.clone(),
1316 lamport_timestamp,
1317 },
1318 );
1319 self.send_operation(
1320 Operation::UpdateSelections {
1321 selections,
1322 lamport_timestamp,
1323 },
1324 cx,
1325 );
1326 }
1327
1328 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1329 self.set_active_selections(Arc::from([]), cx);
1330 }
1331
1332 fn update_language_server(&mut self, cx: &AppContext) {
1333 let language_server = if let Some(language_server) = self.language_server.as_mut() {
1334 language_server
1335 } else {
1336 return;
1337 };
1338 let file = if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
1339 file
1340 } else {
1341 return;
1342 };
1343
1344 let version = post_inc(&mut language_server.next_version);
1345 let snapshot = LanguageServerSnapshot {
1346 buffer_snapshot: self.text.snapshot(),
1347 version,
1348 path: Arc::from(file.abs_path(cx)),
1349 };
1350 language_server
1351 .pending_snapshots
1352 .insert(version, snapshot.clone());
1353 let _ = language_server.latest_snapshot.blocking_send(snapshot);
1354 }
1355
1356 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Local>
1357 where
1358 T: Into<String>,
1359 {
1360 self.edit_internal([0..self.len()], text, false, cx)
1361 }
1362
1363 pub fn edit<I, S, T>(
1364 &mut self,
1365 ranges_iter: I,
1366 new_text: T,
1367 cx: &mut ModelContext<Self>,
1368 ) -> Option<clock::Local>
1369 where
1370 I: IntoIterator<Item = Range<S>>,
1371 S: ToOffset,
1372 T: Into<String>,
1373 {
1374 self.edit_internal(ranges_iter, new_text, false, cx)
1375 }
1376
1377 pub fn edit_with_autoindent<I, S, T>(
1378 &mut self,
1379 ranges_iter: I,
1380 new_text: T,
1381 cx: &mut ModelContext<Self>,
1382 ) -> Option<clock::Local>
1383 where
1384 I: IntoIterator<Item = Range<S>>,
1385 S: ToOffset,
1386 T: Into<String>,
1387 {
1388 self.edit_internal(ranges_iter, new_text, true, cx)
1389 }
1390
1391 pub fn edit_internal<I, S, T>(
1392 &mut self,
1393 ranges_iter: I,
1394 new_text: T,
1395 autoindent: bool,
1396 cx: &mut ModelContext<Self>,
1397 ) -> Option<clock::Local>
1398 where
1399 I: IntoIterator<Item = Range<S>>,
1400 S: ToOffset,
1401 T: Into<String>,
1402 {
1403 let new_text = new_text.into();
1404
1405 // Skip invalid ranges and coalesce contiguous ones.
1406 let mut ranges: Vec<Range<usize>> = Vec::new();
1407 for range in ranges_iter {
1408 let range = range.start.to_offset(self)..range.end.to_offset(self);
1409 if !new_text.is_empty() || !range.is_empty() {
1410 if let Some(prev_range) = ranges.last_mut() {
1411 if prev_range.end >= range.start {
1412 prev_range.end = cmp::max(prev_range.end, range.end);
1413 } else {
1414 ranges.push(range);
1415 }
1416 } else {
1417 ranges.push(range);
1418 }
1419 }
1420 }
1421 if ranges.is_empty() {
1422 return None;
1423 }
1424
1425 self.start_transaction();
1426 self.pending_autoindent.take();
1427 let autoindent_request = if autoindent && self.language.is_some() {
1428 let before_edit = self.snapshot();
1429 let edited = ranges
1430 .iter()
1431 .filter_map(|range| {
1432 let start = range.start.to_point(self);
1433 if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
1434 None
1435 } else {
1436 Some(self.anchor_before(range.start))
1437 }
1438 })
1439 .collect();
1440 Some((before_edit, edited))
1441 } else {
1442 None
1443 };
1444
1445 let first_newline_ix = new_text.find('\n');
1446 let new_text_len = new_text.len();
1447
1448 let edit = self.text.edit(ranges.iter().cloned(), new_text);
1449 let edit_id = edit.local_timestamp();
1450
1451 if let Some((before_edit, edited)) = autoindent_request {
1452 let mut inserted = None;
1453 if let Some(first_newline_ix) = first_newline_ix {
1454 let mut delta = 0isize;
1455 inserted = Some(
1456 ranges
1457 .iter()
1458 .map(|range| {
1459 let start =
1460 (delta + range.start as isize) as usize + first_newline_ix + 1;
1461 let end = (delta + range.start as isize) as usize + new_text_len;
1462 delta +=
1463 (range.end as isize - range.start as isize) + new_text_len as isize;
1464 self.anchor_before(start)..self.anchor_after(end)
1465 })
1466 .collect(),
1467 );
1468 }
1469
1470 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1471 before_edit,
1472 edited,
1473 inserted,
1474 }));
1475 }
1476
1477 self.end_transaction(cx);
1478 self.send_operation(Operation::Buffer(edit), cx);
1479 Some(edit_id)
1480 }
1481
1482 pub fn edits_from_lsp(
1483 &mut self,
1484 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
1485 version: Option<i32>,
1486 cx: &mut ModelContext<Self>,
1487 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
1488 let snapshot = if let Some((version, state)) = version.zip(self.language_server.as_mut()) {
1489 state
1490 .snapshot_for_version(version as usize)
1491 .map(Clone::clone)
1492 } else {
1493 Ok(TextBuffer::deref(self).clone())
1494 };
1495
1496 cx.background().spawn(async move {
1497 let snapshot = snapshot?;
1498 let mut lsp_edits = lsp_edits
1499 .into_iter()
1500 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
1501 .peekable();
1502
1503 let mut edits = Vec::new();
1504 while let Some((mut range, mut new_text)) = lsp_edits.next() {
1505 // Combine any LSP edits that are adjacent.
1506 //
1507 // Also, combine LSP edits that are separated from each other by only
1508 // a newline. This is important because for some code actions,
1509 // Rust-analyzer rewrites the entire buffer via a series of edits that
1510 // are separated by unchanged newline characters.
1511 //
1512 // In order for the diffing logic below to work properly, any edits that
1513 // cancel each other out must be combined into one.
1514 while let Some((next_range, next_text)) = lsp_edits.peek() {
1515 if next_range.start > range.end {
1516 if next_range.start.row > range.end.row + 1
1517 || next_range.start.column > 0
1518 || snapshot.clip_point_utf16(
1519 PointUtf16::new(range.end.row, u32::MAX),
1520 Bias::Left,
1521 ) > range.end
1522 {
1523 break;
1524 }
1525 new_text.push('\n');
1526 }
1527 range.end = next_range.end;
1528 new_text.push_str(&next_text);
1529 lsp_edits.next();
1530 }
1531
1532 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
1533 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
1534 {
1535 return Err(anyhow!("invalid edits received from language server"));
1536 }
1537
1538 // For multiline edits, perform a diff of the old and new text so that
1539 // we can identify the changes more precisely, preserving the locations
1540 // of any anchors positioned in the unchanged regions.
1541 if range.end.row > range.start.row {
1542 let mut offset = range.start.to_offset(&snapshot);
1543 let old_text = snapshot.text_for_range(range).collect::<String>();
1544
1545 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
1546 let mut moved_since_edit = true;
1547 for change in diff.iter_all_changes() {
1548 let tag = change.tag();
1549 let value = change.value();
1550 match tag {
1551 ChangeTag::Equal => {
1552 offset += value.len();
1553 moved_since_edit = true;
1554 }
1555 ChangeTag::Delete => {
1556 let start = snapshot.anchor_after(offset);
1557 let end = snapshot.anchor_before(offset + value.len());
1558 if moved_since_edit {
1559 edits.push((start..end, String::new()));
1560 } else {
1561 edits.last_mut().unwrap().0.end = end;
1562 }
1563 offset += value.len();
1564 moved_since_edit = false;
1565 }
1566 ChangeTag::Insert => {
1567 if moved_since_edit {
1568 let anchor = snapshot.anchor_after(offset);
1569 edits.push((anchor.clone()..anchor, value.to_string()));
1570 } else {
1571 edits.last_mut().unwrap().1.push_str(value);
1572 }
1573 moved_since_edit = false;
1574 }
1575 }
1576 }
1577 } else if range.end == range.start {
1578 let anchor = snapshot.anchor_after(range.start);
1579 edits.push((anchor.clone()..anchor, new_text));
1580 } else {
1581 let edit_start = snapshot.anchor_after(range.start);
1582 let edit_end = snapshot.anchor_before(range.end);
1583 edits.push((edit_start..edit_end, new_text));
1584 }
1585 }
1586
1587 Ok(edits)
1588 })
1589 }
1590
1591 fn did_edit(
1592 &mut self,
1593 old_version: &clock::Global,
1594 was_dirty: bool,
1595 cx: &mut ModelContext<Self>,
1596 ) {
1597 if self.edits_since::<usize>(old_version).next().is_none() {
1598 return;
1599 }
1600
1601 self.reparse(cx);
1602 self.update_language_server(cx);
1603
1604 cx.emit(Event::Edited);
1605 if !was_dirty {
1606 cx.emit(Event::Dirtied);
1607 }
1608 cx.notify();
1609 }
1610
1611 fn grammar(&self) -> Option<&Arc<Grammar>> {
1612 self.language.as_ref().and_then(|l| l.grammar.as_ref())
1613 }
1614
1615 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1616 &mut self,
1617 ops: I,
1618 cx: &mut ModelContext<Self>,
1619 ) -> Result<()> {
1620 self.pending_autoindent.take();
1621 let was_dirty = self.is_dirty();
1622 let old_version = self.version.clone();
1623 let mut deferred_ops = Vec::new();
1624 let buffer_ops = ops
1625 .into_iter()
1626 .filter_map(|op| match op {
1627 Operation::Buffer(op) => Some(op),
1628 _ => {
1629 if self.can_apply_op(&op) {
1630 self.apply_op(op, cx);
1631 } else {
1632 deferred_ops.push(op);
1633 }
1634 None
1635 }
1636 })
1637 .collect::<Vec<_>>();
1638 self.text.apply_ops(buffer_ops)?;
1639 self.deferred_ops.insert(deferred_ops);
1640 self.flush_deferred_ops(cx);
1641 self.did_edit(&old_version, was_dirty, cx);
1642 // Notify independently of whether the buffer was edited as the operations could include a
1643 // selection update.
1644 cx.notify();
1645 Ok(())
1646 }
1647
1648 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1649 let mut deferred_ops = Vec::new();
1650 for op in self.deferred_ops.drain().iter().cloned() {
1651 if self.can_apply_op(&op) {
1652 self.apply_op(op, cx);
1653 } else {
1654 deferred_ops.push(op);
1655 }
1656 }
1657 self.deferred_ops.insert(deferred_ops);
1658 }
1659
1660 fn can_apply_op(&self, operation: &Operation) -> bool {
1661 match operation {
1662 Operation::Buffer(_) => {
1663 unreachable!("buffer operations should never be applied at this layer")
1664 }
1665 Operation::UpdateDiagnostics {
1666 diagnostics: diagnostic_set,
1667 ..
1668 } => diagnostic_set.iter().all(|diagnostic| {
1669 self.text.can_resolve(&diagnostic.range.start)
1670 && self.text.can_resolve(&diagnostic.range.end)
1671 }),
1672 Operation::UpdateSelections { selections, .. } => selections
1673 .iter()
1674 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1675 Operation::UpdateCompletionTriggers { .. } => true,
1676 }
1677 }
1678
1679 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1680 match operation {
1681 Operation::Buffer(_) => {
1682 unreachable!("buffer operations should never be applied at this layer")
1683 }
1684 Operation::UpdateDiagnostics {
1685 diagnostics: diagnostic_set,
1686 lamport_timestamp,
1687 } => {
1688 let snapshot = self.snapshot();
1689 self.apply_diagnostic_update(
1690 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1691 lamport_timestamp,
1692 cx,
1693 );
1694 }
1695 Operation::UpdateSelections {
1696 selections,
1697 lamport_timestamp,
1698 } => {
1699 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1700 if set.lamport_timestamp > lamport_timestamp {
1701 return;
1702 }
1703 }
1704
1705 self.remote_selections.insert(
1706 lamport_timestamp.replica_id,
1707 SelectionSet {
1708 selections,
1709 lamport_timestamp,
1710 },
1711 );
1712 self.text.lamport_clock.observe(lamport_timestamp);
1713 self.selections_update_count += 1;
1714 }
1715 Operation::UpdateCompletionTriggers {
1716 triggers,
1717 lamport_timestamp,
1718 } => {
1719 self.completion_triggers = triggers;
1720 self.text.lamport_clock.observe(lamport_timestamp);
1721 }
1722 }
1723 }
1724
1725 fn apply_diagnostic_update(
1726 &mut self,
1727 diagnostics: DiagnosticSet,
1728 lamport_timestamp: clock::Lamport,
1729 cx: &mut ModelContext<Self>,
1730 ) {
1731 if lamport_timestamp > self.diagnostics_timestamp {
1732 self.diagnostics = diagnostics;
1733 self.diagnostics_timestamp = lamport_timestamp;
1734 self.diagnostics_update_count += 1;
1735 self.text.lamport_clock.observe(lamport_timestamp);
1736 cx.notify();
1737 cx.emit(Event::DiagnosticsUpdated);
1738 }
1739 }
1740
1741 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1742 cx.emit(Event::Operation(operation));
1743 }
1744
1745 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1746 self.remote_selections.remove(&replica_id);
1747 cx.notify();
1748 }
1749
1750 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1751 let was_dirty = self.is_dirty();
1752 let old_version = self.version.clone();
1753
1754 if let Some((transaction_id, operation)) = self.text.undo() {
1755 self.send_operation(Operation::Buffer(operation), cx);
1756 self.did_edit(&old_version, was_dirty, cx);
1757 Some(transaction_id)
1758 } else {
1759 None
1760 }
1761 }
1762
1763 pub fn undo_to_transaction(
1764 &mut self,
1765 transaction_id: TransactionId,
1766 cx: &mut ModelContext<Self>,
1767 ) -> bool {
1768 let was_dirty = self.is_dirty();
1769 let old_version = self.version.clone();
1770
1771 let operations = self.text.undo_to_transaction(transaction_id);
1772 let undone = !operations.is_empty();
1773 for operation in operations {
1774 self.send_operation(Operation::Buffer(operation), cx);
1775 }
1776 if undone {
1777 self.did_edit(&old_version, was_dirty, cx)
1778 }
1779 undone
1780 }
1781
1782 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1783 let was_dirty = self.is_dirty();
1784 let old_version = self.version.clone();
1785
1786 if let Some((transaction_id, operation)) = self.text.redo() {
1787 self.send_operation(Operation::Buffer(operation), cx);
1788 self.did_edit(&old_version, was_dirty, cx);
1789 Some(transaction_id)
1790 } else {
1791 None
1792 }
1793 }
1794
1795 pub fn redo_to_transaction(
1796 &mut self,
1797 transaction_id: TransactionId,
1798 cx: &mut ModelContext<Self>,
1799 ) -> bool {
1800 let was_dirty = self.is_dirty();
1801 let old_version = self.version.clone();
1802
1803 let operations = self.text.redo_to_transaction(transaction_id);
1804 let redone = !operations.is_empty();
1805 for operation in operations {
1806 self.send_operation(Operation::Buffer(operation), cx);
1807 }
1808 if redone {
1809 self.did_edit(&old_version, was_dirty, cx)
1810 }
1811 redone
1812 }
1813
1814 pub fn completion_triggers(&self) -> &[String] {
1815 &self.completion_triggers
1816 }
1817}
1818
1819#[cfg(any(test, feature = "test-support"))]
1820impl Buffer {
1821 pub fn set_group_interval(&mut self, group_interval: Duration) {
1822 self.text.set_group_interval(group_interval);
1823 }
1824
1825 pub fn randomly_edit<T>(
1826 &mut self,
1827 rng: &mut T,
1828 old_range_count: usize,
1829 cx: &mut ModelContext<Self>,
1830 ) where
1831 T: rand::Rng,
1832 {
1833 let mut old_ranges: Vec<Range<usize>> = Vec::new();
1834 for _ in 0..old_range_count {
1835 let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1);
1836 if last_end > self.len() {
1837 break;
1838 }
1839 old_ranges.push(self.text.random_byte_range(last_end, rng));
1840 }
1841 let new_text_len = rng.gen_range(0..10);
1842 let new_text: String = crate::random_char_iter::RandomCharIter::new(&mut *rng)
1843 .take(new_text_len)
1844 .collect();
1845 log::info!(
1846 "mutating buffer {} at {:?}: {:?}",
1847 self.replica_id(),
1848 old_ranges,
1849 new_text
1850 );
1851 self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx);
1852 }
1853
1854 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1855 let was_dirty = self.is_dirty();
1856 let old_version = self.version.clone();
1857
1858 let ops = self.text.randomly_undo_redo(rng);
1859 if !ops.is_empty() {
1860 for op in ops {
1861 self.send_operation(Operation::Buffer(op), cx);
1862 self.did_edit(&old_version, was_dirty, cx);
1863 }
1864 }
1865 }
1866}
1867
1868impl Entity for Buffer {
1869 type Event = Event;
1870
1871 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
1872 if let Some(file) = self.file.as_ref() {
1873 file.buffer_removed(self.remote_id(), cx);
1874 if let Some((lang_server, file)) = self.language_server.as_ref().zip(file.as_local()) {
1875 let request = lang_server
1876 .server
1877 .notify::<lsp::notification::DidCloseTextDocument>(
1878 lsp::DidCloseTextDocumentParams {
1879 text_document: lsp::TextDocumentIdentifier::new(
1880 lsp::Url::from_file_path(file.abs_path(cx)).unwrap(),
1881 ),
1882 },
1883 );
1884 cx.foreground().spawn(request).detach_and_log_err(cx);
1885 }
1886 }
1887 }
1888}
1889
1890impl Deref for Buffer {
1891 type Target = TextBuffer;
1892
1893 fn deref(&self) -> &Self::Target {
1894 &self.text
1895 }
1896}
1897
1898impl BufferSnapshot {
1899 fn suggest_autoindents<'a>(
1900 &'a self,
1901 row_range: Range<u32>,
1902 ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
1903 let mut query_cursor = QueryCursorHandle::new();
1904 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1905 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1906
1907 // Get the "indentation ranges" that intersect this row range.
1908 let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
1909 let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
1910 query_cursor.set_point_range(
1911 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
1912 ..Point::new(row_range.end, 0).to_ts_point(),
1913 );
1914 let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
1915 for mat in query_cursor.matches(
1916 &grammar.indents_query,
1917 tree.root_node(),
1918 TextProvider(self.as_rope()),
1919 ) {
1920 let mut node_kind = "";
1921 let mut start: Option<Point> = None;
1922 let mut end: Option<Point> = None;
1923 for capture in mat.captures {
1924 if Some(capture.index) == indent_capture_ix {
1925 node_kind = capture.node.kind();
1926 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1927 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1928 } else if Some(capture.index) == end_capture_ix {
1929 end = Some(Point::from_ts_point(capture.node.start_position().into()));
1930 }
1931 }
1932
1933 if let Some((start, end)) = start.zip(end) {
1934 if start.row == end.row {
1935 continue;
1936 }
1937
1938 let range = start..end;
1939 match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
1940 Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
1941 Ok(ix) => {
1942 let prev_range = &mut indentation_ranges[ix];
1943 prev_range.0.end = prev_range.0.end.max(range.end);
1944 }
1945 }
1946 }
1947 }
1948
1949 let mut prev_row = prev_non_blank_row.unwrap_or(0);
1950 Some(row_range.map(move |row| {
1951 let row_start = Point::new(row, self.indent_column_for_line(row));
1952
1953 let mut indent_from_prev_row = false;
1954 let mut outdent_to_row = u32::MAX;
1955 for (range, _node_kind) in &indentation_ranges {
1956 if range.start.row >= row {
1957 break;
1958 }
1959
1960 if range.start.row == prev_row && range.end > row_start {
1961 indent_from_prev_row = true;
1962 }
1963 if range.end.row >= prev_row && range.end <= row_start {
1964 outdent_to_row = outdent_to_row.min(range.start.row);
1965 }
1966 }
1967
                let suggestion = if outdent_to_row == prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                } else if indent_from_prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: true,
                    }
                } else if outdent_to_row < prev_row {
                    IndentSuggestion {
                        basis_row: outdent_to_row,
                        indent: false,
                    }
                } else {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                };

                prev_row = row;
                suggestion
            }))
        } else {
            None
        }
    }

    fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
        while row > 0 {
            row -= 1;
            if !self.is_line_blank(row) {
                return Some(row);
            }
        }
        None
    }

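    /// Returns an iterator over the text in the given range, split into chunks. When
    /// `language_aware` is true, each chunk is annotated with its syntax highlight id
    /// and the severity of any diagnostic that overlaps it.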
    pub fn chunks<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
        language_aware: bool,
    ) -> BufferChunks<'a> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);

        let mut tree = None;
        let mut diagnostic_endpoints = Vec::new();
        if language_aware {
            tree = self.tree.as_ref();
            for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                });
            }
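            // Sort endpoints so that, at equal offsets, range starts come before ends.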
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
        }

        BufferChunks::new(
            self.text.as_rope(),
            range,
            tree,
            self.grammar(),
            diagnostic_endpoints,
        )
    }

    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language
            .as_ref()
            .and_then(|language| language.grammar.as_ref())
    }

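    /// Returns the byte range of the smallest syntax node that contains the given range
    /// and is strictly larger than it. For an empty range sitting at a node boundary, a
    /// named node starting exactly at that position is preferred over the node ending
    /// there.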
    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
        let tree = self.tree.as_ref()?;
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut cursor = tree.root_node().walk();

        // Descend to the smallest leaf that touches or exceeds the start of the range.
        while cursor.goto_first_child_for_byte(range.start).is_some() {}

        // Ascend to the smallest ancestor that strictly contains the range.
        loop {
            let node_range = cursor.node().byte_range();
            if node_range.start <= range.start
                && node_range.end >= range.end
                && node_range.len() > range.len()
            {
                break;
            }
            if !cursor.goto_parent() {
                break;
            }
        }

        let left_node = cursor.node();

        // For an empty range, try to find another node immediately to the right of the range.
        if left_node.end_byte() == range.start {
            let mut right_node = None;
            while !cursor.goto_next_sibling() {
                if !cursor.goto_parent() {
                    break;
                }
            }

            while cursor.node().start_byte() == range.start {
                right_node = Some(cursor.node());
                if !cursor.goto_first_child() {
                    break;
                }
            }

            if let Some(right_node) = right_node {
                if right_node.is_named() || !left_node.is_named() {
                    return Some(right_node.byte_range());
                }
            }
        }

        Some(left_node.byte_range())
    }

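    /// Builds an outline of the buffer's symbols using the grammar's `outline` query,
    /// optionally styling each item's text with the given syntax theme. Returns `None`
    /// if there is no parse tree, the query lacks the expected captures, or no items
    /// are found.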
    pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
        let tree = self.tree.as_ref()?;
        let grammar = self
            .language
            .as_ref()
            .and_then(|language| language.grammar.as_ref())?;

        let mut cursor = QueryCursorHandle::new();
        let matches = cursor.matches(
            &grammar.outline_query,
            tree.root_node(),
            TextProvider(self.as_rope()),
        );

        let mut chunks = self.chunks(0..self.len(), true);

        let item_capture_ix = grammar.outline_query.capture_index_for_name("item")?;
        let name_capture_ix = grammar.outline_query.capture_index_for_name("name")?;
        let context_capture_ix = grammar
            .outline_query
            .capture_index_for_name("context")
            .unwrap_or(u32::MAX);

        let mut stack = Vec::<Range<usize>>::new();
        let items = matches
            .filter_map(|mat| {
                let item_node = mat.nodes_for_capture_index(item_capture_ix).next()?;
                let range = item_node.start_byte()..item_node.end_byte();
                let mut text = String::new();
                let mut name_ranges = Vec::new();
                let mut highlight_ranges = Vec::new();

                for capture in mat.captures {
                    let node_is_name;
                    if capture.index == name_capture_ix {
                        node_is_name = true;
                    } else if capture.index == context_capture_ix {
                        node_is_name = false;
                    } else {
                        continue;
                    }

                    let range = capture.node.start_byte()..capture.node.end_byte();
                    if !text.is_empty() {
                        text.push(' ');
                    }
                    if node_is_name {
                        let mut start = text.len();
                        let end = start + range.len();

                        // When multiple names are captured, the matchable text
                        // includes the whitespace between the names.
                        if !name_ranges.is_empty() {
                            start -= 1;
                        }

                        name_ranges.push(start..end);
                    }

                    let mut offset = range.start;
                    chunks.seek(offset);
                    while let Some(mut chunk) = chunks.next() {
                        if chunk.text.len() > range.end - offset {
                            chunk.text = &chunk.text[0..(range.end - offset)];
                            offset = range.end;
                        } else {
                            offset += chunk.text.len();
                        }
                        let style = chunk
                            .highlight_id
                            .zip(theme)
                            .and_then(|(highlight, theme)| highlight.style(theme));
                        if let Some(style) = style {
                            let start = text.len();
                            let end = start + chunk.text.len();
                            highlight_ranges.push((start..end, style));
                        }
                        text.push_str(chunk.text);
                        if offset >= range.end {
                            break;
                        }
                    }
                }

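                // An item's depth is the number of previously-seen items whose ranges
                // still enclose this item's range.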
                while stack.last().map_or(false, |prev_range| {
                    !prev_range.contains(&range.start) || !prev_range.contains(&range.end)
                }) {
                    stack.pop();
                }
                stack.push(range.clone());

                Some(OutlineItem {
                    depth: stack.len() - 1,
                    range: self.anchor_after(range.start)..self.anchor_before(range.end),
                    text,
                    highlight_ranges,
                    name_ranges,
                })
            })
            .collect::<Vec<_>>();

        if items.is_empty() {
            None
        } else {
            Some(Outline::new(items))
        }
    }

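    /// Returns the byte ranges of the innermost pair of brackets that encloses the
    /// given range, according to the grammar's `brackets` query.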
    pub fn enclosing_bracket_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<usize>, Range<usize>)> {
        let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;

        // Find bracket pairs that *inclusively* contain the given range.
        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
        let mut cursor = QueryCursorHandle::new();
        let matches = cursor.set_byte_range(range).matches(
            &grammar.brackets_query,
            tree.root_node(),
            TextProvider(self.as_rope()),
        );

        // Get the ranges of the innermost pair of brackets.
        matches
            .filter_map(|mat| {
                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
                Some((open.byte_range(), close.byte_range()))
            })
            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
    }
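    /// Returns, for each remote replica, an iterator over that replica's selections
    /// intersecting the given range. The local replica's own selections are excluded.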
    pub fn remote_selections_in_range<'a>(
        &'a self,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = (ReplicaId, impl 'a + Iterator<Item = &'a Selection<Anchor>>)>
    {
        self.remote_selections
            .iter()
            .filter(|(replica_id, set)| {
                **replica_id != self.text.replica_id() && !set.selections.is_empty()
            })
            .map(move |(replica_id, set)| {
                let start_ix = match set.selections.binary_search_by(|probe| {
                    probe
                        .end
                        .cmp(&range.start, self)
                        .unwrap()
                        .then(Ordering::Greater)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };
                let end_ix = match set.selections.binary_search_by(|probe| {
                    probe
                        .start
                        .cmp(&range.end, self)
                        .unwrap()
                        .then(Ordering::Less)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };

                (*replica_id, set.selections[start_ix..end_ix].iter())
            })
    }

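    /// Returns the diagnostic entries whose ranges intersect the given range, converting
    /// their anchors to positions of type `O`.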
    pub fn diagnostics_in_range<'a, T, O>(
        &'a self,
        search_range: Range<T>,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        T: 'a + Clone + ToOffset,
        O: 'a + FromAnchor,
    {
        self.diagnostics.range(search_range.clone(), self, true)
    }

    pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
        let mut groups = Vec::new();
        self.diagnostics.groups(&mut groups, self);
        groups
    }

    pub fn diagnostic_group<'a, O>(
        &'a self,
        group_id: usize,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics.group(group_id, self)
    }

    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    pub fn parse_count(&self) -> usize {
        self.parse_count
    }

    pub fn selections_update_count(&self) -> usize {
        self.selections_update_count
    }

    pub fn path(&self) -> Option<&Arc<Path>> {
        self.path.as_ref()
    }

    pub fn file_update_count(&self) -> usize {
        self.file_update_count
    }
}

impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            tree: self.tree.clone(),
            path: self.path.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            selections_update_count: self.selections_update_count,
            diagnostics_update_count: self.diagnostics_update_count,
            file_update_count: self.file_update_count,
            is_parsing: self.is_parsing,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }
}

impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        ByteChunks(self.0.chunks_in_range(node.byte_range()))
    }
}

pub(crate) struct ByteChunks<'a>(rope::Chunks<'a>);

impl<'a> Iterator for ByteChunks<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(str::as_bytes)
    }
}

unsafe impl<'a> Send for BufferChunks<'a> {}

impl<'a> BufferChunks<'a> {
    pub(crate) fn new(
        text: &'a Rope,
        range: Range<usize>,
        tree: Option<&'a Tree>,
        grammar: Option<&'a Arc<Grammar>>,
        diagnostic_endpoints: Vec<DiagnosticEndpoint>,
    ) -> Self {
        let mut highlights = None;
        if let Some((grammar, tree)) = grammar.zip(tree) {
            let mut query_cursor = QueryCursorHandle::new();

            // TODO - add a Tree-sitter API to remove the need for this. The captures
            // iterator borrows the query cursor, and both are stored together in
            // `BufferChunkHighlights`, so the borrow is extended to 'static here.
            let cursor = unsafe {
                std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
            };
            let captures = cursor.set_byte_range(range.clone()).captures(
                &grammar.highlights_query,
                tree.root_node(),
                TextProvider(text),
            );
            highlights = Some(BufferChunkHighlights {
                captures,
                next_capture: None,
                stack: Default::default(),
                highlight_map: grammar.highlight_map(),
                _query_cursor: query_cursor,
            })
        }

        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
        let chunks = text.chunks_in_range(range.clone());

        BufferChunks {
            range,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            highlights,
        }
    }

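    /// Repositions the iterator at `offset`, dropping any highlight captures that end
    /// before it and narrowing the underlying query cursor to the remaining byte range.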
    pub fn seek(&mut self, offset: usize) {
        self.range.start = offset;
        self.chunks.seek(self.range.start);
        if let Some(highlights) = self.highlights.as_mut() {
            highlights
                .stack
                .retain(|(end_offset, _)| *end_offset > offset);
            if let Some((mat, capture_ix)) = &highlights.next_capture {
                let capture = mat.captures[*capture_ix as usize];
                if offset >= capture.node.start_byte() {
                    let next_capture_end = capture.node.end_byte();
                    if offset < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_map.get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            }
            highlights.captures.set_byte_range(self.range.clone());
        }
    }

    pub fn offset(&self) -> usize {
        self.range.start
    }

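    // Tracks how many diagnostics of each severity currently overlap the iterator's
    // position, incrementing at range starts and decrementing at range ends.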
    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }
    }

    fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

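        // Advance past any highlight captures and diagnostic endpoints that begin at or
        // before the current position, and note where the next ones start so the chunk
        // can be clipped to end there.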
        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
                let capture = mat.captures[*capture_ix as usize];
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id = highlights.highlight_map.get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
            if endpoint.offset <= self.range.start {
                self.update_diagnostic_depths(endpoint);
                self.diagnostic_endpoints.next();
            } else {
                next_diagnostic_endpoint = endpoint.offset;
                break;
            }
        }

        if let Some(chunk) = self.chunks.peek() {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
                    highlight_id = Some(*parent_highlight_id);
                }
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                highlight_id,
                diagnostic: self.current_diagnostic_severity(),
            })
        } else {
            None
        }
    }
}

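// `QueryCursorHandle` wraps a pooled tree-sitter `QueryCursor`: cursors are reused via
// the global `QUERY_CURSORS` list and returned to it (with their ranges reset) on drop.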
impl QueryCursorHandle {
    pub(crate) fn new() -> Self {
        QueryCursorHandle(Some(
            QUERY_CURSORS
                .lock()
                .pop()
                .unwrap_or_else(QueryCursor::new),
        ))
    }
}

impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        self.0.as_ref().unwrap()
    }
}

impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.0.as_mut().unwrap()
    }
}

impl Drop for QueryCursorHandle {
    fn drop(&mut self) {
        let mut cursor = self.0.take().unwrap();
        cursor.set_byte_range(0..usize::MAX);
        cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
        QUERY_CURSORS.lock().push(cursor)
    }
}


trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}

impl ToTreeSitterPoint for Point {
    fn to_ts_point(self) -> tree_sitter::Point {
        tree_sitter::Point::new(self.row as usize, self.column as usize)
    }

    fn from_ts_point(point: tree_sitter::Point) -> Self {
        Point::new(point.row as u32, point.column as u32)
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl LanguageServerState {
    fn snapshot_for_version(&mut self, version: usize) -> Result<&text::BufferSnapshot> {
        const OLD_VERSIONS_TO_RETAIN: usize = 10;

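        // Drop snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than the one
        // being requested, then look up the requested version.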
        self.pending_snapshots
            .retain(|&v, _| v + OLD_VERSIONS_TO_RETAIN >= version);
        let snapshot = self
            .pending_snapshots
            .get(&version)
            .ok_or_else(|| anyhow!("missing snapshot"))?;
        Ok(&snapshot.buffer_snapshot)
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            code: Default::default(),
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            group_id: Default::default(),
            is_primary: Default::default(),
            is_valid: true,
            is_disk_based: false,
        }
    }
}

impl Completion {
    pub fn sort_key(&self) -> (usize, &str) {
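        // Sort variable completions before other kinds, then by the label's filter text.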
        let kind_key = match self.lsp_completion.kind {
            Some(lsp::CompletionItemKind::VARIABLE) => 0,
            _ => 1,
        };
        (kind_key, &self.label.text[self.label.filter_range.clone()])
    }

    pub fn is_snippet(&self) -> bool {
        self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
    }
}

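/// Groups an ascending sequence of values into contiguous ranges, splitting any run
/// that would exceed `max_len`. For example, the values `[1, 2, 3, 5, 6, 9]` with
/// `max_len == 2` yield the ranges `1..3`, `3..4`, `5..7`, and `9..10`.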
pub fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values.into_iter();
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}

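/// Classifies a character for word-boundary detection: newline, other whitespace, word
/// characters (alphanumeric or `_`), or punctuation.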
pub fn char_kind(c: char) -> CharKind {
    if c == '\n' {
        CharKind::Newline
    } else if c.is_whitespace() {
        CharKind::Whitespace
    } else if c.is_alphanumeric() || c == '_' {
        CharKind::Word
    } else {
        CharKind::Punctuation
    }
}