pub use crate::{
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
    PLAIN_TEXT,
};
use crate::{
    diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
    outline::OutlineItem,
    range_from_lsp, CodeLabel, Outline, ToLspPosition,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use futures::FutureExt as _;
use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
use lsp::LanguageServer;
use parking_lot::Mutex;
use postage::{prelude::Stream, sink::Sink, watch};
use similar::{ChangeTag, TextDiff};
use smol::future::yield_now;
use std::{
    any::Any,
    cmp::{self, Ordering},
    collections::{BTreeMap, HashMap},
    ffi::OsString,
    future::Future,
    iter::{Iterator, Peekable},
    ops::{Deref, DerefMut, Range, Sub},
    path::{Path, PathBuf},
    str,
    sync::Arc,
    time::{Duration, Instant, SystemTime, UNIX_EPOCH},
    vec,
};
use sum_tree::TreeMap;
use text::{operation_queue::OperationQueue, rope::TextDimension};
pub use text::{Buffer as TextBuffer, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, QueryCursor, Tree};
use util::{post_inc, TryFutureExt as _};
42
#[cfg(any(test, feature = "test-support"))]
pub use tree_sitter_rust;

pub use lsp::DiagnosticSeverity;

lazy_static! {
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}

// TODO - Make this configurable
const INDENT_SIZE: u32 = 4;
54
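/// The core text-buffer model. It wraps a [`TextBuffer`] (the underlying replicated text)
/// and layers file metadata, syntax parsing state, diagnostics, remote selections,
/// autoindent requests, and an optional language-server connection on top of it.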
pub struct Buffer {
    text: TextBuffer,
    file: Option<Box<dyn File>>,
    saved_version: clock::Global,
    saved_mtime: SystemTime,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_tree: Mutex<Option<SyntaxTree>>,
    parsing_in_background: bool,
    parse_count: usize,
    diagnostics: DiagnosticSet,
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    selections_update_count: usize,
    diagnostics_update_count: usize,
    diagnostics_timestamp: clock::Lamport,
    file_update_count: usize,
    language_server: Option<LanguageServerState>,
    completion_triggers: Vec<String>,
    deferred_ops: OperationQueue<Operation>,
    #[cfg(test)]
    pub(crate) operations: Vec<Operation>,
}
79
80pub struct BufferSnapshot {
81 text: text::BufferSnapshot,
82 tree: Option<Tree>,
83 path: Option<Arc<Path>>,
84 diagnostics: DiagnosticSet,
85 diagnostics_update_count: usize,
86 file_update_count: usize,
87 remote_selections: TreeMap<ReplicaId, SelectionSet>,
88 selections_update_count: usize,
89 is_parsing: bool,
90 language: Option<Arc<Language>>,
91 parse_count: usize,
92}
93
94#[derive(Clone, Debug)]
95struct SelectionSet {
96 selections: Arc<[Selection<Anchor>]>,
97 lamport_timestamp: clock::Lamport,
98}
99
100#[derive(Clone, Debug, PartialEq, Eq)]
101pub struct GroupId {
102 source: Arc<str>,
103 id: usize,
104}
105
106#[derive(Clone, Debug, PartialEq, Eq)]
107pub struct Diagnostic {
108 pub code: Option<String>,
109 pub severity: DiagnosticSeverity,
110 pub message: String,
111 pub group_id: usize,
112 pub is_valid: bool,
113 pub is_primary: bool,
114 pub is_disk_based: bool,
115}
116
117#[derive(Clone, Debug)]
118pub struct Completion {
119 pub old_range: Range<Anchor>,
120 pub new_text: String,
121 pub label: CodeLabel,
122 pub lsp_completion: lsp::CompletionItem,
123}
124
125#[derive(Clone, Debug)]
126pub struct CodeAction {
127 pub range: Range<Anchor>,
128 pub lsp_action: lsp::CodeAction,
129}
130
131struct LanguageServerState {
132 server: Arc<LanguageServer>,
133 latest_snapshot: watch::Sender<LanguageServerSnapshot>,
134 pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
135 next_version: usize,
136 _maintain_server: Task<Option<()>>,
137}
138
139#[derive(Clone)]
140struct LanguageServerSnapshot {
141 buffer_snapshot: text::BufferSnapshot,
142 version: usize,
143 path: Arc<Path>,
144}
145
146#[derive(Clone, Debug)]
147pub enum Operation {
148 Buffer(text::Operation),
149 UpdateDiagnostics {
150 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
151 lamport_timestamp: clock::Lamport,
152 },
153 UpdateSelections {
154 selections: Arc<[Selection<Anchor>]>,
155 lamport_timestamp: clock::Lamport,
156 },
157 UpdateCompletionTriggers {
158 triggers: Vec<String>,
159 lamport_timestamp: clock::Lamport,
160 },
161}
162
163#[derive(Clone, Debug, Eq, PartialEq)]
164pub enum Event {
165 Edited,
166 Dirtied,
167 Saved,
168 FileHandleChanged,
169 Reloaded,
170 Reparsed,
171 DiagnosticsUpdated,
172 Closed,
173}
174
175pub trait File {
176 fn as_local(&self) -> Option<&dyn LocalFile>;
177
178 fn is_local(&self) -> bool {
179 self.as_local().is_some()
180 }
181
182 fn mtime(&self) -> SystemTime;
183
184 /// Returns the path of this file relative to the worktree's root directory.
185 fn path(&self) -> &Arc<Path>;
186
187 /// Returns the path of this file relative to the worktree's parent directory (this means it
188 /// includes the name of the worktree's root folder).
189 fn full_path(&self, cx: &AppContext) -> PathBuf;
190
191 /// Returns the last component of this handle's absolute path. If this handle refers to the root
192 /// of its worktree, then this method will return the name of the worktree itself.
193 fn file_name(&self, cx: &AppContext) -> OsString;
194
195 fn is_deleted(&self) -> bool;
196
197 fn save(
198 &self,
199 buffer_id: u64,
200 text: Rope,
201 version: clock::Global,
202 cx: &mut MutableAppContext,
203 ) -> Task<Result<(clock::Global, SystemTime)>>;
204
205 fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);
206
207 fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);
208
209 fn as_any(&self) -> &dyn Any;
210
211 fn to_proto(&self) -> rpc::proto::File;
212}
213
214pub trait LocalFile: File {
215 /// Returns the absolute path of this file.
216 fn abs_path(&self, cx: &AppContext) -> PathBuf;
217
218 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
219
220 fn buffer_reloaded(
221 &self,
222 buffer_id: u64,
223 version: &clock::Global,
224 mtime: SystemTime,
225 cx: &mut MutableAppContext,
226 );
227}
228
229#[cfg(any(test, feature = "test-support"))]
230pub struct FakeFile {
231 pub path: Arc<Path>,
232}
233
234#[cfg(any(test, feature = "test-support"))]
235impl FakeFile {
236 pub fn new(path: impl AsRef<Path>) -> Self {
237 Self {
238 path: path.as_ref().into(),
239 }
240 }
241}
242
243#[cfg(any(test, feature = "test-support"))]
244impl File for FakeFile {
245 fn as_local(&self) -> Option<&dyn LocalFile> {
246 Some(self)
247 }
248
249 fn mtime(&self) -> SystemTime {
250 SystemTime::UNIX_EPOCH
251 }
252
253 fn path(&self) -> &Arc<Path> {
254 &self.path
255 }
256
257 fn full_path(&self, _: &AppContext) -> PathBuf {
258 self.path.to_path_buf()
259 }
260
261 fn file_name(&self, _: &AppContext) -> OsString {
262 self.path.file_name().unwrap().to_os_string()
263 }
264
265 fn is_deleted(&self) -> bool {
266 false
267 }
268
269 fn save(
270 &self,
271 _: u64,
272 _: Rope,
273 _: clock::Global,
274 cx: &mut MutableAppContext,
275 ) -> Task<Result<(clock::Global, SystemTime)>> {
276 cx.spawn(|_| async move { Ok((Default::default(), SystemTime::UNIX_EPOCH)) })
277 }
278
279 fn buffer_updated(&self, _: u64, _: Operation, _: &mut MutableAppContext) {}
280
281 fn buffer_removed(&self, _: u64, _: &mut MutableAppContext) {}
282
283 fn as_any(&self) -> &dyn Any {
284 self
285 }
286
287 fn to_proto(&self) -> rpc::proto::File {
288 unimplemented!()
289 }
290}
291
292#[cfg(any(test, feature = "test-support"))]
293impl LocalFile for FakeFile {
294 fn abs_path(&self, _: &AppContext) -> PathBuf {
295 self.path.to_path_buf()
296 }
297
298 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
299 cx.background().spawn(async move { Ok(Default::default()) })
300 }
301
302 fn buffer_reloaded(&self, _: u64, _: &clock::Global, _: SystemTime, _: &mut MutableAppContext) {
303 }
304}
305
306pub(crate) struct QueryCursorHandle(Option<QueryCursor>);
307
308#[derive(Clone)]
309struct SyntaxTree {
310 tree: Tree,
311 version: clock::Global,
312}
313
314#[derive(Clone)]
315struct AutoindentRequest {
316 before_edit: BufferSnapshot,
317 edited: Vec<Anchor>,
318 inserted: Option<Vec<Range<Anchor>>>,
319}
320
321#[derive(Debug)]
322struct IndentSuggestion {
323 basis_row: u32,
324 indent: bool,
325}
326
327pub(crate) struct TextProvider<'a>(pub(crate) &'a Rope);
328
329struct BufferChunkHighlights<'a> {
330 captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
331 next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
332 stack: Vec<(usize, HighlightId)>,
333 highlight_map: HighlightMap,
334 _query_cursor: QueryCursorHandle,
335}
336
337pub struct BufferChunks<'a> {
338 range: Range<usize>,
339 chunks: rope::Chunks<'a>,
340 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
341 error_depth: usize,
342 warning_depth: usize,
343 information_depth: usize,
344 hint_depth: usize,
345 highlights: Option<BufferChunkHighlights<'a>>,
346}
347
348#[derive(Clone, Copy, Debug, Default)]
349pub struct Chunk<'a> {
350 pub text: &'a str,
351 pub highlight_id: Option<HighlightId>,
352 pub diagnostic: Option<DiagnosticSeverity>,
353}
354
355pub(crate) struct Diff {
356 base_version: clock::Global,
357 new_text: Arc<str>,
358 changes: Vec<(ChangeTag, usize)>,
359 start_offset: usize,
360}
361
362#[derive(Clone, Copy)]
363pub(crate) struct DiagnosticEndpoint {
364 offset: usize,
365 is_start: bool,
366 severity: DiagnosticSeverity,
367}
368
369#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
370pub enum CharKind {
371 Newline,
372 Punctuation,
373 Whitespace,
374 Word,
375}
376
377impl Buffer {
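    /// Creates a buffer that is not backed by any file, seeded with `base_text`.
    ///
    /// A minimal usage sketch (assuming a gpui `MutableAppContext` named `cx` is in scope;
    /// the replica id and text are illustrative):
    ///
    /// ```ignore
    /// let buffer = cx.add_model(|cx| Buffer::new(0, "fn main() {}", cx));
    /// ```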
    pub fn new<T: Into<Arc<str>>>(
        replica_id: ReplicaId,
        base_text: T,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new(
                replica_id,
                cx.model_id() as u64,
                History::new(base_text.into()),
            ),
            None,
        )
    }
392
393 pub fn from_file<T: Into<Arc<str>>>(
394 replica_id: ReplicaId,
395 base_text: T,
396 file: Box<dyn File>,
397 cx: &mut ModelContext<Self>,
398 ) -> Self {
399 Self::build(
400 TextBuffer::new(
401 replica_id,
402 cx.model_id() as u64,
403 History::new(base_text.into()),
404 ),
405 Some(file),
406 )
407 }
408
409 pub fn from_proto(
410 replica_id: ReplicaId,
411 message: proto::BufferState,
412 file: Option<Box<dyn File>>,
413 cx: &mut ModelContext<Self>,
414 ) -> Result<Self> {
415 let buffer = TextBuffer::new(
416 replica_id,
417 message.id,
418 History::new(Arc::from(message.base_text)),
419 );
420 let mut this = Self::build(buffer, file);
421 let ops = message
422 .operations
423 .into_iter()
424 .map(proto::deserialize_operation)
425 .collect::<Result<Vec<_>>>()?;
426 this.apply_ops(ops, cx)?;
427
428 for selection_set in message.selections {
429 let lamport_timestamp = clock::Lamport {
430 replica_id: selection_set.replica_id as ReplicaId,
431 value: selection_set.lamport_timestamp,
432 };
433 this.remote_selections.insert(
434 selection_set.replica_id as ReplicaId,
435 SelectionSet {
436 selections: proto::deserialize_selections(selection_set.selections),
437 lamport_timestamp,
438 },
439 );
440 this.text.lamport_clock.observe(lamport_timestamp);
441 }
442 let snapshot = this.snapshot();
443 let entries = proto::deserialize_diagnostics(message.diagnostics);
444 this.apply_diagnostic_update(
445 DiagnosticSet::from_sorted_entries(entries.iter().cloned(), &snapshot),
446 clock::Lamport {
447 replica_id: 0,
448 value: message.diagnostics_timestamp,
449 },
450 cx,
451 );
452
453 this.completion_triggers = message.completion_triggers;
454
455 Ok(this)
456 }
457
458 pub fn to_proto(&self) -> proto::BufferState {
459 let mut operations = self
460 .text
461 .history()
462 .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone())))
463 .chain(self.deferred_ops.iter().map(proto::serialize_operation))
464 .collect::<Vec<_>>();
465 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
466 proto::BufferState {
467 id: self.remote_id(),
468 file: self.file.as_ref().map(|f| f.to_proto()),
469 base_text: self.base_text().to_string(),
470 operations,
471 selections: self
472 .remote_selections
473 .iter()
474 .map(|(replica_id, set)| proto::SelectionSet {
475 replica_id: *replica_id as u32,
476 selections: proto::serialize_selections(&set.selections),
477 lamport_timestamp: set.lamport_timestamp.value,
478 })
479 .collect(),
480 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
481 diagnostics_timestamp: self.diagnostics_timestamp.value,
482 completion_triggers: self.completion_triggers.clone(),
483 }
484 }
485
486 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
487 self.set_language(Some(language), cx);
488 self
489 }
490
491 pub fn with_language_server(
492 mut self,
493 server: Arc<LanguageServer>,
494 cx: &mut ModelContext<Self>,
495 ) -> Self {
496 self.set_language_server(Some(server), cx);
497 self
498 }
499
    fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
        let saved_mtime = if let Some(file) = file.as_ref() {
            file.mtime()
        } else {
            UNIX_EPOCH
        };

        Self {
            saved_mtime,
            saved_version: buffer.version(),
            text: buffer,
            file,
            syntax_tree: Mutex::new(None),
            parsing_in_background: false,
            parse_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            autoindent_requests: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            selections_update_count: 0,
            diagnostics: Default::default(),
            diagnostics_update_count: 0,
            diagnostics_timestamp: Default::default(),
            file_update_count: 0,
            language_server: None,
            completion_triggers: Default::default(),
            deferred_ops: OperationQueue::new(),
            #[cfg(test)]
            operations: Default::default(),
        }
    }
533
534 pub fn snapshot(&self) -> BufferSnapshot {
535 BufferSnapshot {
536 text: self.text.snapshot(),
537 tree: self.syntax_tree(),
538 path: self.file.as_ref().map(|f| f.path().clone()),
539 remote_selections: self.remote_selections.clone(),
540 diagnostics: self.diagnostics.clone(),
541 diagnostics_update_count: self.diagnostics_update_count,
542 file_update_count: self.file_update_count,
543 is_parsing: self.parsing_in_background,
544 language: self.language.clone(),
545 parse_count: self.parse_count,
546 selections_update_count: self.selections_update_count,
547 }
548 }
549
550 pub fn file(&self) -> Option<&dyn File> {
551 self.file.as_deref()
552 }
553
554 pub fn save(
555 &mut self,
556 cx: &mut ModelContext<Self>,
557 ) -> Task<Result<(clock::Global, SystemTime)>> {
558 let file = if let Some(file) = self.file.as_ref() {
559 file
560 } else {
561 return Task::ready(Err(anyhow!("buffer has no file")));
562 };
563 let text = self.as_rope().clone();
564 let version = self.version();
565 let save = file.save(self.remote_id(), text, version, cx.as_mut());
566 cx.spawn(|this, mut cx| async move {
567 let (version, mtime) = save.await?;
568 this.update(&mut cx, |this, cx| {
569 this.did_save(version.clone(), mtime, None, cx);
570 });
571 Ok((version, mtime))
572 })
573 }
574
575 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
576 self.language = language;
577 self.reparse(cx);
578 }
579
580 pub fn set_language_server(
581 &mut self,
582 language_server: Option<Arc<lsp::LanguageServer>>,
583 cx: &mut ModelContext<Self>,
584 ) {
585 self.language_server = if let Some((server, file)) =
586 language_server.zip(self.file.as_ref().and_then(|f| f.as_local()))
587 {
588 let initial_snapshot = LanguageServerSnapshot {
589 buffer_snapshot: self.text.snapshot(),
590 version: 0,
591 path: file.abs_path(cx).into(),
592 };
593 let (latest_snapshot_tx, mut latest_snapshot_rx) =
594 watch::channel_with::<LanguageServerSnapshot>(initial_snapshot.clone());
595
596 Some(LanguageServerState {
597 latest_snapshot: latest_snapshot_tx,
598 pending_snapshots: BTreeMap::from_iter([(0, initial_snapshot)]),
599 next_version: 1,
600 server: server.clone(),
601 _maintain_server: cx.spawn_weak(|this, mut cx| async move {
602 let capabilities = server.capabilities().await.or_else(|| {
603 log::info!("language server exited");
604 if let Some(this) = this.upgrade(&cx) {
605 this.update(&mut cx, |this, _| this.language_server = None);
606 }
607 None
608 })?;
609
610 let triggers = capabilities
611 .completion_provider
612 .and_then(|c| c.trigger_characters)
613 .unwrap_or_default();
614 this.upgrade(&cx)?.update(&mut cx, |this, cx| {
615 let lamport_timestamp = this.text.lamport_clock.tick();
616 this.completion_triggers = triggers.clone();
617 this.send_operation(
618 Operation::UpdateCompletionTriggers {
619 triggers,
620 lamport_timestamp,
621 },
622 cx,
623 );
624 cx.notify();
625 });
626
627 let maintain_changes = cx.background().spawn(async move {
628 let initial_snapshot =
629 latest_snapshot_rx.recv().await.ok_or_else(|| {
630 anyhow!("buffer dropped before sending DidOpenTextDocument")
631 })?;
632 server
633 .notify::<lsp::notification::DidOpenTextDocument>(
634 lsp::DidOpenTextDocumentParams {
635 text_document: lsp::TextDocumentItem::new(
636 lsp::Url::from_file_path(initial_snapshot.path).unwrap(),
637 Default::default(),
638 initial_snapshot.version as i32,
639 initial_snapshot.buffer_snapshot.text(),
640 ),
641 },
642 )
643 .await?;
644
645 let mut prev_version = initial_snapshot.buffer_snapshot.version().clone();
646 while let Some(snapshot) = latest_snapshot_rx.recv().await {
647 let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
648 let buffer_snapshot = snapshot.buffer_snapshot.clone();
649 let content_changes = buffer_snapshot
650 .edits_since::<(PointUtf16, usize)>(&prev_version)
651 .map(|edit| {
652 let edit_start = edit.new.start.0;
653 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
654 let new_text = buffer_snapshot
655 .text_for_range(edit.new.start.1..edit.new.end.1)
656 .collect();
657 lsp::TextDocumentContentChangeEvent {
658 range: Some(lsp::Range::new(
659 edit_start.to_lsp_position(),
660 edit_end.to_lsp_position(),
661 )),
662 range_length: None,
663 text: new_text,
664 }
665 })
666 .collect();
667 let changes = lsp::DidChangeTextDocumentParams {
668 text_document: lsp::VersionedTextDocumentIdentifier::new(
669 uri,
670 snapshot.version as i32,
671 ),
672 content_changes,
673 };
674 server
675 .notify::<lsp::notification::DidChangeTextDocument>(changes)
676 .await?;
677
678 prev_version = snapshot.buffer_snapshot.version().clone();
679 }
680
681 Ok::<_, anyhow::Error>(())
682 });
683
684 maintain_changes.log_err().await
685 }),
686 })
687 } else {
688 None
689 };
690 }
691
692 pub fn did_save(
693 &mut self,
694 version: clock::Global,
695 mtime: SystemTime,
696 new_file: Option<Box<dyn File>>,
697 cx: &mut ModelContext<Self>,
698 ) {
699 self.saved_mtime = mtime;
700 self.saved_version = version;
701 if let Some(new_file) = new_file {
702 self.file = Some(new_file);
703 self.file_update_count += 1;
704 }
705 if let Some((state, local_file)) = &self
706 .language_server
707 .as_ref()
708 .zip(self.file.as_ref().and_then(|f| f.as_local()))
709 {
710 cx.background()
711 .spawn(
712 state
713 .server
714 .notify::<lsp::notification::DidSaveTextDocument>(
715 lsp::DidSaveTextDocumentParams {
716 text_document: lsp::TextDocumentIdentifier {
717 uri: lsp::Url::from_file_path(local_file.abs_path(cx)).unwrap(),
718 },
719 text: None,
720 },
721 ),
722 )
723 .detach()
724 }
725 cx.emit(Event::Saved);
726 cx.notify();
727 }
728
729 pub fn did_reload(
730 &mut self,
731 version: clock::Global,
732 mtime: SystemTime,
733 cx: &mut ModelContext<Self>,
734 ) {
735 self.saved_mtime = mtime;
736 self.saved_version = version;
737 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
738 file.buffer_reloaded(self.remote_id(), &self.saved_version, self.saved_mtime, cx);
739 }
740 cx.emit(Event::Reloaded);
741 cx.notify();
742 }
743
744 pub fn file_updated(
745 &mut self,
746 new_file: Box<dyn File>,
747 cx: &mut ModelContext<Self>,
748 ) -> Task<()> {
749 let old_file = if let Some(file) = self.file.as_ref() {
750 file
751 } else {
752 return Task::ready(());
753 };
754 let mut file_changed = false;
755 let mut task = Task::ready(());
756
757 if new_file.path() != old_file.path() {
758 file_changed = true;
759 }
760
761 if new_file.is_deleted() {
762 if !old_file.is_deleted() {
763 file_changed = true;
764 if !self.is_dirty() {
765 cx.emit(Event::Dirtied);
766 }
767 }
768 } else {
769 let new_mtime = new_file.mtime();
770 if new_mtime != old_file.mtime() {
771 file_changed = true;
772
773 if !self.is_dirty() {
774 task = cx.spawn(|this, mut cx| {
775 async move {
776 let new_text = this.read_with(&cx, |this, cx| {
777 this.file
778 .as_ref()
779 .and_then(|file| file.as_local().map(|f| f.load(cx)))
780 });
781 if let Some(new_text) = new_text {
782 let new_text = new_text.await?;
783 let diff = this
784 .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
785 .await;
786 this.update(&mut cx, |this, cx| {
787 if this.apply_diff(diff, cx) {
788 this.did_reload(this.version(), new_mtime, cx);
789 }
790 });
791 }
792 Ok(())
793 }
794 .log_err()
795 .map(drop)
796 });
797 }
798 }
799 }
800
801 if file_changed {
802 self.file_update_count += 1;
803 cx.emit(Event::FileHandleChanged);
804 cx.notify();
805 }
806 self.file = Some(new_file);
807 task
808 }
809
810 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
811 cx.emit(Event::Closed);
812 }
813
814 pub fn language(&self) -> Option<&Arc<Language>> {
815 self.language.as_ref()
816 }
817
818 pub fn language_server(&self) -> Option<&Arc<LanguageServer>> {
819 self.language_server.as_ref().map(|state| &state.server)
820 }
821
822 pub fn parse_count(&self) -> usize {
823 self.parse_count
824 }
825
826 pub fn selections_update_count(&self) -> usize {
827 self.selections_update_count
828 }
829
830 pub fn diagnostics_update_count(&self) -> usize {
831 self.diagnostics_update_count
832 }
833
834 pub fn file_update_count(&self) -> usize {
835 self.file_update_count
836 }
837
838 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
839 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
840 self.interpolate_tree(syntax_tree);
841 Some(syntax_tree.tree.clone())
842 } else {
843 None
844 }
845 }
846
847 #[cfg(any(test, feature = "test-support"))]
848 pub fn is_parsing(&self) -> bool {
849 self.parsing_in_background
850 }
851
852 #[cfg(test)]
853 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
854 self.sync_parse_timeout = timeout;
855 }
856
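    // Parsing strategy: kick off a parse on the background executor, but block for up to
    // `sync_parse_timeout` so that small buffers are re-highlighted synchronously. If the
    // parse takes longer, it continues in the background, and once it finishes we parse
    // again if the buffer's contents or grammar changed in the meantime.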
857 fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
858 if self.parsing_in_background {
859 return false;
860 }
861
862 if let Some(grammar) = self.grammar().cloned() {
863 let old_tree = self.syntax_tree();
864 let text = self.as_rope().clone();
865 let parsed_version = self.version();
866 let parse_task = cx.background().spawn({
867 let grammar = grammar.clone();
868 async move { grammar.parse_text(&text, old_tree) }
869 });
870
871 match cx
872 .background()
873 .block_with_timeout(self.sync_parse_timeout, parse_task)
874 {
875 Ok(new_tree) => {
876 self.did_finish_parsing(new_tree, parsed_version, cx);
877 return true;
878 }
879 Err(parse_task) => {
880 self.parsing_in_background = true;
881 cx.spawn(move |this, mut cx| async move {
882 let new_tree = parse_task.await;
883 this.update(&mut cx, move |this, cx| {
884 let grammar_changed = this
885 .grammar()
886 .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
887 let parse_again =
888 this.version.changed_since(&parsed_version) || grammar_changed;
889 this.parsing_in_background = false;
890 this.did_finish_parsing(new_tree, parsed_version, cx);
891
892 if parse_again && this.reparse(cx) {
893 return;
894 }
895 });
896 })
897 .detach();
898 }
899 }
900 }
901 false
902 }
903
904 fn interpolate_tree(&self, tree: &mut SyntaxTree) {
905 for edit in self.edits_since::<(usize, Point)>(&tree.version) {
906 let (bytes, lines) = edit.flatten();
907 tree.tree.edit(&InputEdit {
908 start_byte: bytes.new.start,
909 old_end_byte: bytes.new.start + bytes.old.len(),
910 new_end_byte: bytes.new.end,
911 start_position: lines.new.start.to_ts_point(),
912 old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
913 .to_ts_point(),
914 new_end_position: lines.new.end.to_ts_point(),
915 });
916 }
917 tree.version = self.version();
918 }
919
920 fn did_finish_parsing(
921 &mut self,
922 tree: Tree,
923 version: clock::Global,
924 cx: &mut ModelContext<Self>,
925 ) {
926 self.parse_count += 1;
927 *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
928 self.request_autoindent(cx);
929 cx.emit(Event::Reparsed);
930 cx.notify();
931 }
932
933 pub fn update_diagnostics<T>(
934 &mut self,
935 mut diagnostics: Vec<DiagnosticEntry<T>>,
936 version: Option<i32>,
937 cx: &mut ModelContext<Self>,
938 ) -> Result<()>
939 where
940 T: Copy + Ord + TextDimension + Sub<Output = T> + Clip + ToPoint,
941 {
942 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
943 Ordering::Equal
944 .then_with(|| b.is_primary.cmp(&a.is_primary))
945 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
946 .then_with(|| a.severity.cmp(&b.severity))
947 .then_with(|| a.message.cmp(&b.message))
948 }
949
950 let version = version.map(|version| version as usize);
951 let content =
952 if let Some((version, language_server)) = version.zip(self.language_server.as_mut()) {
953 language_server.snapshot_for_version(version)?
954 } else {
955 self.deref()
956 };
957
958 diagnostics.sort_unstable_by(|a, b| {
959 Ordering::Equal
960 .then_with(|| a.range.start.cmp(&b.range.start))
961 .then_with(|| b.range.end.cmp(&a.range.end))
962 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
963 });
964
965 let mut sanitized_diagnostics = Vec::new();
966 let mut edits_since_save = content.edits_since::<T>(&self.saved_version).peekable();
967 let mut last_edit_old_end = T::default();
968 let mut last_edit_new_end = T::default();
969 'outer: for entry in diagnostics {
970 let mut start = entry.range.start;
971 let mut end = entry.range.end;
972
973 // Some diagnostics are based on files on disk instead of buffers'
974 // current contents. Adjust these diagnostics' ranges to reflect
975 // any unsaved edits.
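            // (Illustrative example: if the saved file reports a diagnostic at offset 100
            // and an unsaved edit earlier in the buffer grew the text by 10 characters,
            // the diagnostic is shifted to offset 110. Diagnostics that overlap an unsaved
            // edit are discarded entirely via the `continue 'outer` below.)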
976 if entry.diagnostic.is_disk_based {
977 while let Some(edit) = edits_since_save.peek() {
978 if edit.old.end <= start {
979 last_edit_old_end = edit.old.end;
980 last_edit_new_end = edit.new.end;
981 edits_since_save.next();
982 } else if edit.old.start <= end && edit.old.end >= start {
983 continue 'outer;
984 } else {
985 break;
986 }
987 }
988
989 let start_overshoot = start - last_edit_old_end;
990 start = last_edit_new_end;
991 start.add_assign(&start_overshoot);
992
993 let end_overshoot = end - last_edit_old_end;
994 end = last_edit_new_end;
995 end.add_assign(&end_overshoot);
996 }
997
998 let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content);
999 let mut range = range.start.to_point(content)..range.end.to_point(content);
1000 // Expand empty ranges by one character
1001 if range.start == range.end {
1002 range.end.column += 1;
1003 range.end = content.clip_point(range.end, Bias::Right);
1004 if range.start == range.end && range.end.column > 0 {
1005 range.start.column -= 1;
1006 range.start = content.clip_point(range.start, Bias::Left);
1007 }
1008 }
1009
1010 sanitized_diagnostics.push(DiagnosticEntry {
1011 range,
1012 diagnostic: entry.diagnostic,
1013 });
1014 }
1015 drop(edits_since_save);
1016
1017 let set = DiagnosticSet::new(sanitized_diagnostics, content);
1018 let lamport_timestamp = self.text.lamport_clock.tick();
1019 self.apply_diagnostic_update(set.clone(), lamport_timestamp, cx);
1020
1021 let op = Operation::UpdateDiagnostics {
1022 diagnostics: set.iter().cloned().collect(),
1023 lamport_timestamp,
1024 };
1025 self.send_operation(op, cx);
1026 Ok(())
1027 }
1028
1029 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1030 if let Some(indent_columns) = self.compute_autoindents() {
1031 let indent_columns = cx.background().spawn(indent_columns);
1032 match cx
1033 .background()
1034 .block_with_timeout(Duration::from_micros(500), indent_columns)
1035 {
1036 Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
1037 Err(indent_columns) => {
1038 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1039 let indent_columns = indent_columns.await;
1040 this.update(&mut cx, |this, cx| {
1041 this.apply_autoindents(indent_columns, cx);
1042 });
1043 }));
1044 }
1045 }
1046 }
1047 }
1048
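    // Computes new indent levels for every row touched by the pending autoindent requests,
    // in three passes per request: (1) record the indent each edited line would have had
    // in the pre-edit snapshot, (2) re-suggest indents against the current snapshot and
    // keep only rows whose suggestion changed, and (3) indent any newly inserted rows.
    // The work yields to the executor every `max_rows_between_yields` rows.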
1049 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
1050 let max_rows_between_yields = 100;
1051 let snapshot = self.snapshot();
1052 if snapshot.language.is_none()
1053 || snapshot.tree.is_none()
1054 || self.autoindent_requests.is_empty()
1055 {
1056 return None;
1057 }
1058
1059 let autoindent_requests = self.autoindent_requests.clone();
1060 Some(async move {
1061 let mut indent_columns = BTreeMap::new();
1062 for request in autoindent_requests {
1063 let old_to_new_rows = request
1064 .edited
1065 .iter()
1066 .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
1067 .zip(
1068 request
1069 .edited
1070 .iter()
1071 .map(|anchor| anchor.summary::<Point>(&snapshot).row),
1072 )
1073 .collect::<BTreeMap<u32, u32>>();
1074
1075 let mut old_suggestions = HashMap::<u32, u32>::default();
1076 let old_edited_ranges =
1077 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1078 for old_edited_range in old_edited_ranges {
1079 let suggestions = request
1080 .before_edit
1081 .suggest_autoindents(old_edited_range.clone())
1082 .into_iter()
1083 .flatten();
1084 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1085 let indentation_basis = old_to_new_rows
1086 .get(&suggestion.basis_row)
1087 .and_then(|from_row| old_suggestions.get(from_row).copied())
1088 .unwrap_or_else(|| {
1089 request
1090 .before_edit
1091 .indent_column_for_line(suggestion.basis_row)
1092 });
1093 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1094 old_suggestions.insert(
1095 *old_to_new_rows.get(&old_row).unwrap(),
1096 indentation_basis + delta,
1097 );
1098 }
1099 yield_now().await;
1100 }
1101
                // At this point, `old_suggestions` contains the suggested indentation for
                // every edited line, computed against the buffer as it was before the edit,
                // but keyed by each line's row after the edits were applied.
1104 let new_edited_row_ranges =
1105 contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
1106 for new_edited_row_range in new_edited_row_ranges {
1107 let suggestions = snapshot
1108 .suggest_autoindents(new_edited_row_range.clone())
1109 .into_iter()
1110 .flatten();
1111 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1112 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1113 let new_indentation = indent_columns
1114 .get(&suggestion.basis_row)
1115 .copied()
1116 .unwrap_or_else(|| {
1117 snapshot.indent_column_for_line(suggestion.basis_row)
1118 })
1119 + delta;
1120 if old_suggestions
1121 .get(&new_row)
1122 .map_or(true, |old_indentation| new_indentation != *old_indentation)
1123 {
1124 indent_columns.insert(new_row, new_indentation);
1125 }
1126 }
1127 yield_now().await;
1128 }
1129
1130 if let Some(inserted) = request.inserted.as_ref() {
1131 let inserted_row_ranges = contiguous_ranges(
1132 inserted
1133 .iter()
1134 .map(|range| range.to_point(&snapshot))
1135 .flat_map(|range| range.start.row..range.end.row + 1),
1136 max_rows_between_yields,
1137 );
1138 for inserted_row_range in inserted_row_ranges {
1139 let suggestions = snapshot
1140 .suggest_autoindents(inserted_row_range.clone())
1141 .into_iter()
1142 .flatten();
1143 for (row, suggestion) in inserted_row_range.zip(suggestions) {
1144 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1145 let new_indentation = indent_columns
1146 .get(&suggestion.basis_row)
1147 .copied()
1148 .unwrap_or_else(|| {
1149 snapshot.indent_column_for_line(suggestion.basis_row)
1150 })
1151 + delta;
1152 indent_columns.insert(row, new_indentation);
1153 }
1154 yield_now().await;
1155 }
1156 }
1157 }
1158 indent_columns
1159 })
1160 }
1161
1162 fn apply_autoindents(
1163 &mut self,
1164 indent_columns: BTreeMap<u32, u32>,
1165 cx: &mut ModelContext<Self>,
1166 ) {
1167 self.autoindent_requests.clear();
1168 self.start_transaction();
1169 for (row, indent_column) in &indent_columns {
1170 self.set_indent_column_for_line(*row, *indent_column, cx);
1171 }
1172 self.end_transaction(cx);
1173 }
1174
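    // For example: if a line currently starts with 2 spaces of indentation and the target
    // column is 6, this inserts 4 spaces at the start of the line; if the target is smaller
    // than the current indent, the leading `current - target` columns are deleted instead.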
1175 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
1176 let current_column = self.indent_column_for_line(row);
1177 if column > current_column {
1178 let offset = Point::new(row, 0).to_offset(&*self);
1179 self.edit(
1180 [offset..offset],
1181 " ".repeat((column - current_column) as usize),
1182 cx,
1183 );
1184 } else if column < current_column {
1185 self.edit(
1186 [Point::new(row, 0)..Point::new(row, current_column - column)],
1187 "",
1188 cx,
1189 );
1190 }
1191 }
1192
1193 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
1194 // TODO: it would be nice to not allocate here.
1195 let old_text = self.text();
1196 let base_version = self.version();
1197 cx.background().spawn(async move {
1198 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
1199 .iter_all_changes()
1200 .map(|c| (c.tag(), c.value().len()))
1201 .collect::<Vec<_>>();
1202 Diff {
1203 base_version,
1204 new_text,
1205 changes,
1206 start_offset: 0,
1207 }
1208 })
1209 }
1210
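    // Applies a `Diff` produced by `Buffer::diff`, but only if the buffer has not changed
    // since the diff's `base_version` was captured; otherwise the diff is stale and this
    // returns `false` without editing anything.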
1211 pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
1212 if self.version == diff.base_version {
1213 self.start_transaction();
1214 let mut offset = diff.start_offset;
1215 for (tag, len) in diff.changes {
1216 let range = offset..(offset + len);
1217 match tag {
1218 ChangeTag::Equal => offset += len,
1219 ChangeTag::Delete => {
1220 self.edit([range], "", cx);
1221 }
1222 ChangeTag::Insert => {
1223 self.edit(
1224 [offset..offset],
1225 &diff.new_text
1226 [range.start - diff.start_offset..range.end - diff.start_offset],
1227 cx,
1228 );
1229 offset += len;
1230 }
1231 }
1232 }
1233 self.end_transaction(cx);
1234 true
1235 } else {
1236 false
1237 }
1238 }
1239
1240 pub fn is_dirty(&self) -> bool {
1241 !self.saved_version.observed_all(&self.version)
1242 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1243 }
1244
1245 pub fn has_conflict(&self) -> bool {
1246 !self.saved_version.observed_all(&self.version)
1247 && self
1248 .file
1249 .as_ref()
1250 .map_or(false, |file| file.mtime() > self.saved_mtime)
1251 }
1252
1253 pub fn subscribe(&mut self) -> Subscription {
1254 self.text.subscribe()
1255 }
1256
1257 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1258 self.start_transaction_at(Instant::now())
1259 }
1260
1261 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1262 self.text.start_transaction_at(now)
1263 }
1264
1265 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1266 self.end_transaction_at(Instant::now(), cx)
1267 }
1268
1269 pub fn end_transaction_at(
1270 &mut self,
1271 now: Instant,
1272 cx: &mut ModelContext<Self>,
1273 ) -> Option<TransactionId> {
1274 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1275 let was_dirty = start_version != self.saved_version;
1276 self.did_edit(&start_version, was_dirty, cx);
1277 Some(transaction_id)
1278 } else {
1279 None
1280 }
1281 }
1282
1283 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1284 self.text.push_transaction(transaction, now);
1285 }
1286
1287 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1288 self.text.finalize_last_transaction()
1289 }
1290
1291 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1292 self.text.forget_transaction(transaction_id);
1293 }
1294
1295 pub fn wait_for_edits(
1296 &mut self,
1297 edit_ids: impl IntoIterator<Item = clock::Local>,
1298 ) -> impl Future<Output = ()> {
1299 self.text.wait_for_edits(edit_ids)
1300 }
1301
1302 pub fn wait_for_anchors<'a>(
1303 &mut self,
1304 anchors: impl IntoIterator<Item = &'a Anchor>,
1305 ) -> impl Future<Output = ()> {
1306 self.text.wait_for_anchors(anchors)
1307 }
1308
1309 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = ()> {
1310 self.text.wait_for_version(version)
1311 }
1312
1313 pub fn set_active_selections(
1314 &mut self,
1315 selections: Arc<[Selection<Anchor>]>,
1316 cx: &mut ModelContext<Self>,
1317 ) {
1318 let lamport_timestamp = self.text.lamport_clock.tick();
1319 self.remote_selections.insert(
1320 self.text.replica_id(),
1321 SelectionSet {
1322 selections: selections.clone(),
1323 lamport_timestamp,
1324 },
1325 );
1326 self.send_operation(
1327 Operation::UpdateSelections {
1328 selections,
1329 lamport_timestamp,
1330 },
1331 cx,
1332 );
1333 }
1334
1335 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1336 self.set_active_selections(Arc::from([]), cx);
1337 }
1338
1339 fn update_language_server(&mut self, cx: &AppContext) {
1340 let language_server = if let Some(language_server) = self.language_server.as_mut() {
1341 language_server
1342 } else {
1343 return;
1344 };
1345 let file = if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
1346 file
1347 } else {
1348 return;
1349 };
1350
1351 let version = post_inc(&mut language_server.next_version);
1352 let snapshot = LanguageServerSnapshot {
1353 buffer_snapshot: self.text.snapshot(),
1354 version,
1355 path: Arc::from(file.abs_path(cx)),
1356 };
1357 language_server
1358 .pending_snapshots
1359 .insert(version, snapshot.clone());
1360 let _ = language_server.latest_snapshot.blocking_send(snapshot);
1361 }
1362
1363 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Local>
1364 where
1365 T: Into<String>,
1366 {
1367 self.edit_internal([0..self.len()], text, false, cx)
1368 }
1369
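    /// Replaces each of the given ranges with `new_text`, skipping edits that would be
    /// no-ops and coalescing ranges that touch or overlap.
    ///
    /// A usage sketch (inside a `Buffer::update` closure where `cx: &mut ModelContext<Buffer>`
    /// is in scope; the offsets are illustrative):
    ///
    /// ```ignore
    /// buffer.edit([0..0], "// header\n", cx);   // insert at the start
    /// buffer.edit([3..5, 10..10], "xy", cx);    // several ranges, one replacement text
    /// ```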
1370 pub fn edit<I, S, T>(
1371 &mut self,
1372 ranges_iter: I,
1373 new_text: T,
1374 cx: &mut ModelContext<Self>,
1375 ) -> Option<clock::Local>
1376 where
1377 I: IntoIterator<Item = Range<S>>,
1378 S: ToOffset,
1379 T: Into<String>,
1380 {
1381 self.edit_internal(ranges_iter, new_text, false, cx)
1382 }
1383
1384 pub fn edit_with_autoindent<I, S, T>(
1385 &mut self,
1386 ranges_iter: I,
1387 new_text: T,
1388 cx: &mut ModelContext<Self>,
1389 ) -> Option<clock::Local>
1390 where
1391 I: IntoIterator<Item = Range<S>>,
1392 S: ToOffset,
1393 T: Into<String>,
1394 {
1395 self.edit_internal(ranges_iter, new_text, true, cx)
1396 }
1397
1398 pub fn edit_internal<I, S, T>(
1399 &mut self,
1400 ranges_iter: I,
1401 new_text: T,
1402 autoindent: bool,
1403 cx: &mut ModelContext<Self>,
1404 ) -> Option<clock::Local>
1405 where
1406 I: IntoIterator<Item = Range<S>>,
1407 S: ToOffset,
1408 T: Into<String>,
1409 {
1410 let new_text = new_text.into();
1411
1412 // Skip invalid ranges and coalesce contiguous ones.
1413 let mut ranges: Vec<Range<usize>> = Vec::new();
1414 for range in ranges_iter {
1415 let range = range.start.to_offset(self)..range.end.to_offset(self);
1416 if !new_text.is_empty() || !range.is_empty() {
1417 if let Some(prev_range) = ranges.last_mut() {
1418 if prev_range.end >= range.start {
1419 prev_range.end = cmp::max(prev_range.end, range.end);
1420 } else {
1421 ranges.push(range);
1422 }
1423 } else {
1424 ranges.push(range);
1425 }
1426 }
1427 }
1428 if ranges.is_empty() {
1429 return None;
1430 }
1431
1432 self.start_transaction();
1433 self.pending_autoindent.take();
1434 let autoindent_request = if autoindent && self.language.is_some() {
1435 let before_edit = self.snapshot();
1436 let edited = ranges
1437 .iter()
1438 .filter_map(|range| {
1439 let start = range.start.to_point(self);
1440 if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
1441 None
1442 } else {
1443 Some(self.anchor_before(range.start))
1444 }
1445 })
1446 .collect();
1447 Some((before_edit, edited))
1448 } else {
1449 None
1450 };
1451
1452 let first_newline_ix = new_text.find('\n');
1453 let new_text_len = new_text.len();
1454
1455 let edit = self.text.edit(ranges.iter().cloned(), new_text);
1456 let edit_id = edit.local_timestamp();
1457
1458 if let Some((before_edit, edited)) = autoindent_request {
1459 let mut inserted = None;
1460 if let Some(first_newline_ix) = first_newline_ix {
1461 let mut delta = 0isize;
1462 inserted = Some(
1463 ranges
1464 .iter()
1465 .map(|range| {
1466 let start =
1467 (delta + range.start as isize) as usize + first_newline_ix + 1;
1468 let end = (delta + range.start as isize) as usize + new_text_len;
1469 delta +=
1470 (range.end as isize - range.start as isize) + new_text_len as isize;
1471 self.anchor_before(start)..self.anchor_after(end)
1472 })
1473 .collect(),
1474 );
1475 }
1476
1477 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1478 before_edit,
1479 edited,
1480 inserted,
1481 }));
1482 }
1483
1484 self.end_transaction(cx);
1485 self.send_operation(Operation::Buffer(edit), cx);
1486 Some(edit_id)
1487 }
1488
1489 pub fn edits_from_lsp(
1490 &mut self,
1491 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
1492 version: Option<i32>,
1493 cx: &mut ModelContext<Self>,
1494 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
1495 let snapshot = if let Some((version, state)) = version.zip(self.language_server.as_mut()) {
1496 state
1497 .snapshot_for_version(version as usize)
1498 .map(Clone::clone)
1499 } else {
1500 Ok(TextBuffer::deref(self).clone())
1501 };
1502
1503 cx.background().spawn(async move {
1504 let snapshot = snapshot?;
1505 let mut lsp_edits = lsp_edits
1506 .into_iter()
1507 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
1508 .peekable();
1509
1510 let mut edits = Vec::new();
1511 while let Some((mut range, mut new_text)) = lsp_edits.next() {
1512 // Combine any LSP edits that are adjacent.
1513 //
1514 // Also, combine LSP edits that are separated from each other by only
1515 // a newline. This is important because for some code actions,
                // rust-analyzer rewrites the entire buffer via a series of edits that
1517 // are separated by unchanged newline characters.
1518 //
1519 // In order for the diffing logic below to work properly, any edits that
1520 // cancel each other out must be combined into one.
1521 while let Some((next_range, next_text)) = lsp_edits.peek() {
1522 if next_range.start > range.end {
1523 if next_range.start.row > range.end.row + 1
1524 || next_range.start.column > 0
1525 || snapshot.clip_point_utf16(
1526 PointUtf16::new(range.end.row, u32::MAX),
1527 Bias::Left,
1528 ) > range.end
1529 {
1530 break;
1531 }
1532 new_text.push('\n');
1533 }
1534 range.end = next_range.end;
1535 new_text.push_str(&next_text);
1536 lsp_edits.next();
1537 }
1538
1539 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
1540 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
1541 {
1542 return Err(anyhow!("invalid edits received from language server"));
1543 }
1544
1545 // For multiline edits, perform a diff of the old and new text so that
1546 // we can identify the changes more precisely, preserving the locations
1547 // of any anchors positioned in the unchanged regions.
1548 if range.end.row > range.start.row {
1549 let mut offset = range.start.to_offset(&snapshot);
1550 let old_text = snapshot.text_for_range(range).collect::<String>();
1551
1552 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
1553 let mut moved_since_edit = true;
1554 for change in diff.iter_all_changes() {
1555 let tag = change.tag();
1556 let value = change.value();
1557 match tag {
1558 ChangeTag::Equal => {
1559 offset += value.len();
1560 moved_since_edit = true;
1561 }
1562 ChangeTag::Delete => {
1563 let start = snapshot.anchor_after(offset);
1564 let end = snapshot.anchor_before(offset + value.len());
1565 if moved_since_edit {
1566 edits.push((start..end, String::new()));
1567 } else {
1568 edits.last_mut().unwrap().0.end = end;
1569 }
1570 offset += value.len();
1571 moved_since_edit = false;
1572 }
1573 ChangeTag::Insert => {
1574 if moved_since_edit {
1575 let anchor = snapshot.anchor_after(offset);
1576 edits.push((anchor.clone()..anchor, value.to_string()));
1577 } else {
1578 edits.last_mut().unwrap().1.push_str(value);
1579 }
1580 moved_since_edit = false;
1581 }
1582 }
1583 }
1584 } else if range.end == range.start {
1585 let anchor = snapshot.anchor_after(range.start);
1586 edits.push((anchor.clone()..anchor, new_text));
1587 } else {
1588 let edit_start = snapshot.anchor_after(range.start);
1589 let edit_end = snapshot.anchor_before(range.end);
1590 edits.push((edit_start..edit_end, new_text));
1591 }
1592 }
1593
1594 Ok(edits)
1595 })
1596 }
1597
1598 fn did_edit(
1599 &mut self,
1600 old_version: &clock::Global,
1601 was_dirty: bool,
1602 cx: &mut ModelContext<Self>,
1603 ) {
1604 if self.edits_since::<usize>(old_version).next().is_none() {
1605 return;
1606 }
1607
1608 self.reparse(cx);
1609 self.update_language_server(cx);
1610
1611 cx.emit(Event::Edited);
1612 if !was_dirty {
1613 cx.emit(Event::Dirtied);
1614 }
1615 cx.notify();
1616 }
1617
1618 fn grammar(&self) -> Option<&Arc<Grammar>> {
1619 self.language.as_ref().and_then(|l| l.grammar.as_ref())
1620 }
1621
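    // Applies incoming operations. Text operations are forwarded to the underlying
    // `TextBuffer`; the remaining operation kinds (diagnostics, selections, completion
    // triggers) are applied immediately when their anchors can be resolved, and are
    // otherwise queued in `deferred_ops` until later operations make them resolvable.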
1622 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1623 &mut self,
1624 ops: I,
1625 cx: &mut ModelContext<Self>,
1626 ) -> Result<()> {
1627 self.pending_autoindent.take();
1628 let was_dirty = self.is_dirty();
1629 let old_version = self.version.clone();
1630 let mut deferred_ops = Vec::new();
1631 let buffer_ops = ops
1632 .into_iter()
1633 .filter_map(|op| match op {
1634 Operation::Buffer(op) => Some(op),
1635 _ => {
1636 if self.can_apply_op(&op) {
1637 self.apply_op(op, cx);
1638 } else {
1639 deferred_ops.push(op);
1640 }
1641 None
1642 }
1643 })
1644 .collect::<Vec<_>>();
1645 self.text.apply_ops(buffer_ops)?;
1646 self.deferred_ops.insert(deferred_ops);
1647 self.flush_deferred_ops(cx);
1648 self.did_edit(&old_version, was_dirty, cx);
1649 // Notify independently of whether the buffer was edited as the operations could include a
1650 // selection update.
1651 cx.notify();
1652 Ok(())
1653 }
1654
1655 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1656 let mut deferred_ops = Vec::new();
1657 for op in self.deferred_ops.drain().iter().cloned() {
1658 if self.can_apply_op(&op) {
1659 self.apply_op(op, cx);
1660 } else {
1661 deferred_ops.push(op);
1662 }
1663 }
1664 self.deferred_ops.insert(deferred_ops);
1665 }
1666
1667 fn can_apply_op(&self, operation: &Operation) -> bool {
1668 match operation {
1669 Operation::Buffer(_) => {
1670 unreachable!("buffer operations should never be applied at this layer")
1671 }
1672 Operation::UpdateDiagnostics {
1673 diagnostics: diagnostic_set,
1674 ..
1675 } => diagnostic_set.iter().all(|diagnostic| {
1676 self.text.can_resolve(&diagnostic.range.start)
1677 && self.text.can_resolve(&diagnostic.range.end)
1678 }),
1679 Operation::UpdateSelections { selections, .. } => selections
1680 .iter()
1681 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1682 Operation::UpdateCompletionTriggers { .. } => true,
1683 }
1684 }
1685
1686 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1687 match operation {
1688 Operation::Buffer(_) => {
1689 unreachable!("buffer operations should never be applied at this layer")
1690 }
1691 Operation::UpdateDiagnostics {
1692 diagnostics: diagnostic_set,
1693 lamport_timestamp,
1694 } => {
1695 let snapshot = self.snapshot();
1696 self.apply_diagnostic_update(
1697 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1698 lamport_timestamp,
1699 cx,
1700 );
1701 }
1702 Operation::UpdateSelections {
1703 selections,
1704 lamport_timestamp,
1705 } => {
1706 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1707 if set.lamport_timestamp > lamport_timestamp {
1708 return;
1709 }
1710 }
1711
1712 self.remote_selections.insert(
1713 lamport_timestamp.replica_id,
1714 SelectionSet {
1715 selections,
1716 lamport_timestamp,
1717 },
1718 );
1719 self.text.lamport_clock.observe(lamport_timestamp);
1720 self.selections_update_count += 1;
1721 }
1722 Operation::UpdateCompletionTriggers {
1723 triggers,
1724 lamport_timestamp,
1725 } => {
1726 self.completion_triggers = triggers;
1727 self.text.lamport_clock.observe(lamport_timestamp);
1728 }
1729 }
1730 }
1731
1732 fn apply_diagnostic_update(
1733 &mut self,
1734 diagnostics: DiagnosticSet,
1735 lamport_timestamp: clock::Lamport,
1736 cx: &mut ModelContext<Self>,
1737 ) {
1738 if lamport_timestamp > self.diagnostics_timestamp {
1739 self.diagnostics = diagnostics;
1740 self.diagnostics_timestamp = lamport_timestamp;
1741 self.diagnostics_update_count += 1;
1742 self.text.lamport_clock.observe(lamport_timestamp);
1743 cx.notify();
1744 cx.emit(Event::DiagnosticsUpdated);
1745 }
1746 }
1747
1748 #[cfg(not(test))]
1749 pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1750 if let Some(file) = &self.file {
1751 file.buffer_updated(self.remote_id(), operation, cx.as_mut());
1752 }
1753 }
1754
1755 #[cfg(test)]
1756 pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
1757 self.operations.push(operation);
1758 }
1759
1760 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1761 self.remote_selections.remove(&replica_id);
1762 cx.notify();
1763 }
1764
1765 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1766 let was_dirty = self.is_dirty();
1767 let old_version = self.version.clone();
1768
1769 if let Some((transaction_id, operation)) = self.text.undo() {
1770 self.send_operation(Operation::Buffer(operation), cx);
1771 self.did_edit(&old_version, was_dirty, cx);
1772 Some(transaction_id)
1773 } else {
1774 None
1775 }
1776 }
1777
1778 pub fn undo_to_transaction(
1779 &mut self,
1780 transaction_id: TransactionId,
1781 cx: &mut ModelContext<Self>,
1782 ) -> bool {
1783 let was_dirty = self.is_dirty();
1784 let old_version = self.version.clone();
1785
1786 let operations = self.text.undo_to_transaction(transaction_id);
1787 let undone = !operations.is_empty();
1788 for operation in operations {
1789 self.send_operation(Operation::Buffer(operation), cx);
1790 }
1791 if undone {
1792 self.did_edit(&old_version, was_dirty, cx)
1793 }
1794 undone
1795 }
1796
1797 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1798 let was_dirty = self.is_dirty();
1799 let old_version = self.version.clone();
1800
1801 if let Some((transaction_id, operation)) = self.text.redo() {
1802 self.send_operation(Operation::Buffer(operation), cx);
1803 self.did_edit(&old_version, was_dirty, cx);
1804 Some(transaction_id)
1805 } else {
1806 None
1807 }
1808 }
1809
1810 pub fn redo_to_transaction(
1811 &mut self,
1812 transaction_id: TransactionId,
1813 cx: &mut ModelContext<Self>,
1814 ) -> bool {
1815 let was_dirty = self.is_dirty();
1816 let old_version = self.version.clone();
1817
1818 let operations = self.text.redo_to_transaction(transaction_id);
1819 let redone = !operations.is_empty();
1820 for operation in operations {
1821 self.send_operation(Operation::Buffer(operation), cx);
1822 }
1823 if redone {
1824 self.did_edit(&old_version, was_dirty, cx)
1825 }
1826 redone
1827 }
1828
1829 pub fn completion_triggers(&self) -> &[String] {
1830 &self.completion_triggers
1831 }
1832}
1833
1834#[cfg(any(test, feature = "test-support"))]
1835impl Buffer {
1836 pub fn set_group_interval(&mut self, group_interval: Duration) {
1837 self.text.set_group_interval(group_interval);
1838 }
1839
1840 pub fn randomly_edit<T>(
1841 &mut self,
1842 rng: &mut T,
1843 old_range_count: usize,
1844 cx: &mut ModelContext<Self>,
1845 ) where
1846 T: rand::Rng,
1847 {
1848 let mut old_ranges: Vec<Range<usize>> = Vec::new();
1849 for _ in 0..old_range_count {
1850 let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1);
1851 if last_end > self.len() {
1852 break;
1853 }
1854 old_ranges.push(self.text.random_byte_range(last_end, rng));
1855 }
1856 let new_text_len = rng.gen_range(0..10);
1857 let new_text: String = crate::random_char_iter::RandomCharIter::new(&mut *rng)
1858 .take(new_text_len)
1859 .collect();
1860 log::info!(
1861 "mutating buffer {} at {:?}: {:?}",
1862 self.replica_id(),
1863 old_ranges,
1864 new_text
1865 );
1866 self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx);
1867 }
1868
1869 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1870 let was_dirty = self.is_dirty();
1871 let old_version = self.version.clone();
1872
1873 let ops = self.text.randomly_undo_redo(rng);
1874 if !ops.is_empty() {
1875 for op in ops {
1876 self.send_operation(Operation::Buffer(op), cx);
1877 self.did_edit(&old_version, was_dirty, cx);
1878 }
1879 }
1880 }
1881}
1882
1883impl Entity for Buffer {
1884 type Event = Event;
1885
1886 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
1887 if let Some(file) = self.file.as_ref() {
1888 file.buffer_removed(self.remote_id(), cx);
1889 if let Some((lang_server, file)) = self.language_server.as_ref().zip(file.as_local()) {
1890 let request = lang_server
1891 .server
1892 .notify::<lsp::notification::DidCloseTextDocument>(
1893 lsp::DidCloseTextDocumentParams {
1894 text_document: lsp::TextDocumentIdentifier::new(
1895 lsp::Url::from_file_path(file.abs_path(cx)).unwrap(),
1896 ),
1897 },
1898 );
1899 cx.foreground().spawn(request).detach_and_log_err(cx);
1900 }
1901 }
1902 }
1903}
1904
1905impl Deref for Buffer {
1906 type Target = TextBuffer;
1907
1908 fn deref(&self) -> &Self::Target {
1909 &self.text
1910 }
1911}
1912
1913impl BufferSnapshot {
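    // Produces one `IndentSuggestion` per row in `row_range`. Each suggestion names a
    // `basis_row` whose indentation should be copied, plus whether to indent one extra
    // level relative to it, based on the "indent" and "end" captures of the grammar's
    // indents query that intersect the rows in question.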
1914 fn suggest_autoindents<'a>(
1915 &'a self,
1916 row_range: Range<u32>,
1917 ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
1918 let mut query_cursor = QueryCursorHandle::new();
1919 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1920 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1921
1922 // Get the "indentation ranges" that intersect this row range.
1923 let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
1924 let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
1925 query_cursor.set_point_range(
1926 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
1927 ..Point::new(row_range.end, 0).to_ts_point(),
1928 );
1929 let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
1930 for mat in query_cursor.matches(
1931 &grammar.indents_query,
1932 tree.root_node(),
1933 TextProvider(self.as_rope()),
1934 ) {
1935 let mut node_kind = "";
1936 let mut start: Option<Point> = None;
1937 let mut end: Option<Point> = None;
1938 for capture in mat.captures {
1939 if Some(capture.index) == indent_capture_ix {
1940 node_kind = capture.node.kind();
1941 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1942 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1943 } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position()));
1945 }
1946 }
1947
1948 if let Some((start, end)) = start.zip(end) {
1949 if start.row == end.row {
1950 continue;
1951 }
1952
1953 let range = start..end;
1954 match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
1955 Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
1956 Ok(ix) => {
1957 let prev_range = &mut indentation_ranges[ix];
1958 prev_range.0.end = prev_range.0.end.max(range.end);
1959 }
1960 }
1961 }
1962 }
1963
1964 let mut prev_row = prev_non_blank_row.unwrap_or(0);
1965 Some(row_range.map(move |row| {
1966 let row_start = Point::new(row, self.indent_column_for_line(row));
1967
1968 let mut indent_from_prev_row = false;
1969 let mut outdent_to_row = u32::MAX;
1970 for (range, _node_kind) in &indentation_ranges {
1971 if range.start.row >= row {
1972 break;
1973 }
1974
1975 if range.start.row == prev_row && range.end > row_start {
1976 indent_from_prev_row = true;
1977 }
1978 if range.end.row >= prev_row && range.end <= row_start {
1979 outdent_to_row = outdent_to_row.min(range.start.row);
1980 }
1981 }
1982
1983 let suggestion = if outdent_to_row == prev_row {
1984 IndentSuggestion {
1985 basis_row: prev_row,
1986 indent: false,
1987 }
1988 } else if indent_from_prev_row {
1989 IndentSuggestion {
1990 basis_row: prev_row,
1991 indent: true,
1992 }
1993 } else if outdent_to_row < prev_row {
1994 IndentSuggestion {
1995 basis_row: outdent_to_row,
1996 indent: false,
1997 }
1998 } else {
1999 IndentSuggestion {
2000 basis_row: prev_row,
2001 indent: false,
2002 }
2003 };
2004
2005 prev_row = row;
2006 suggestion
2007 }))
2008 } else {
2009 None
2010 }
2011 }
2012
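/// Returns the closest row above `row` that contains non-whitespace text, if any.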
2013 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2014 while row > 0 {
2015 row -= 1;
2016 if !self.is_line_blank(row) {
2017 return Some(row);
2018 }
2019 }
2020 None
2021 }
2022
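/// Iterates over chunks of text in the given range. When `language_aware` is
/// true, each chunk also carries a syntax highlight id and the severity of the
/// most severe diagnostic overlapping it; otherwise plain text chunks are
/// returned.
///
/// A minimal usage sketch (assuming a `snapshot: BufferSnapshot` in scope):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     // `chunk.text` is a slice of the buffer's text; `chunk.highlight_id`
///     // and `chunk.diagnostic` are only populated when `language_aware` is true.
///     println!("{:?} {:?}", chunk.highlight_id, chunk.diagnostic);
/// }
/// ```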
2023 pub fn chunks<'a, T: ToOffset>(
2024 &'a self,
2025 range: Range<T>,
2026 language_aware: bool,
2027 ) -> BufferChunks<'a> {
2028 let range = range.start.to_offset(self)..range.end.to_offset(self);
2029
2030 let mut tree = None;
2031 let mut diagnostic_endpoints = Vec::new();
2032 if language_aware {
2033 tree = self.tree.as_ref();
2034 for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
2035 diagnostic_endpoints.push(DiagnosticEndpoint {
2036 offset: entry.range.start,
2037 is_start: true,
2038 severity: entry.diagnostic.severity,
2039 });
2040 diagnostic_endpoints.push(DiagnosticEndpoint {
2041 offset: entry.range.end,
2042 is_start: false,
2043 severity: entry.diagnostic.severity,
2044 });
2045 }
2046 diagnostic_endpoints
2047 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2048 }
2049
2050 BufferChunks::new(
2051 self.text.as_rope(),
2052 range,
2053 tree,
2054 self.grammar(),
2055 diagnostic_endpoints,
2056 )
2057 }
2058
2059 pub fn language(&self) -> Option<&Arc<Language>> {
2060 self.language.as_ref()
2061 }
2062
2063 fn grammar(&self) -> Option<&Arc<Grammar>> {
2064 self.language
2065 .as_ref()
2066 .and_then(|language| language.grammar.as_ref())
2067 }
2068
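/// Returns the byte range of the smallest syntax node that strictly contains the
/// given range. For an empty range at a node boundary, a node starting exactly at
/// that position is preferred over the node ending there, provided the former is
/// named (or the latter is not).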
2069 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2070 let tree = self.tree.as_ref()?;
2071 let range = range.start.to_offset(self)..range.end.to_offset(self);
2072 let mut cursor = tree.root_node().walk();
2073
2074 // Descend to the smallest leaf that contains or follows the start of the range.
2075 while cursor.goto_first_child_for_byte(range.start).is_some() {}
2076
2077 // Ascend to the smallest ancestor that strictly contains the range.
2078 loop {
2079 let node_range = cursor.node().byte_range();
2080 if node_range.start <= range.start
2081 && node_range.end >= range.end
2082 && node_range.len() > range.len()
2083 {
2084 break;
2085 }
2086 if !cursor.goto_parent() {
2087 break;
2088 }
2089 }
2090
2091 let left_node = cursor.node();
2092
2093 // For an empty range, try to find another node immediately to the right of the range.
2094 if left_node.end_byte() == range.start {
2095 let mut right_node = None;
2096 while !cursor.goto_next_sibling() {
2097 if !cursor.goto_parent() {
2098 break;
2099 }
2100 }
2101
2102 while cursor.node().start_byte() == range.start {
2103 right_node = Some(cursor.node());
2104 if !cursor.goto_first_child() {
2105 break;
2106 }
2107 }
2108
2109 if let Some(right_node) = right_node {
2110 if right_node.is_named() || !left_node.is_named() {
2111 return Some(right_node.byte_range());
2112 }
2113 }
2114 }
2115
2116 Some(left_node.byte_range())
2117 }
2118
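/// Builds an outline of the buffer's symbols using the grammar's outline query.
/// Each item records its nesting depth, the concatenated text of its "name" and
/// "context" captures, the sub-ranges of that text that correspond to names, and
/// syntax-highlight ranges resolved against the given theme.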
2119 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2120 let tree = self.tree.as_ref()?;
2121 let grammar = self
2122 .language
2123 .as_ref()
2124 .and_then(|language| language.grammar.as_ref())?;
2125
2126 let mut cursor = QueryCursorHandle::new();
2127 let matches = cursor.matches(
2128 &grammar.outline_query,
2129 tree.root_node(),
2130 TextProvider(self.as_rope()),
2131 );
2132
2133 let mut chunks = self.chunks(0..self.len(), true);
2134
2135 let item_capture_ix = grammar.outline_query.capture_index_for_name("item")?;
2136 let name_capture_ix = grammar.outline_query.capture_index_for_name("name")?;
2137 let context_capture_ix = grammar
2138 .outline_query
2139 .capture_index_for_name("context")
2140 .unwrap_or(u32::MAX);
2141
2142 let mut stack = Vec::<Range<usize>>::new();
2143 let items = matches
2144 .filter_map(|mat| {
2145 let item_node = mat.nodes_for_capture_index(item_capture_ix).next()?;
2146 let range = item_node.start_byte()..item_node.end_byte();
2147 let mut text = String::new();
2148 let mut name_ranges = Vec::new();
2149 let mut highlight_ranges = Vec::new();
2150
2151 for capture in mat.captures {
2152 let node_is_name;
2153 if capture.index == name_capture_ix {
2154 node_is_name = true;
2155 } else if capture.index == context_capture_ix {
2156 node_is_name = false;
2157 } else {
2158 continue;
2159 }
2160
2161 let range = capture.node.start_byte()..capture.node.end_byte();
2162 if !text.is_empty() {
2163 text.push(' ');
2164 }
2165 if node_is_name {
2166 let mut start = text.len();
2167 let end = start + range.len();
2168
2169 // When multiple names are captured, the matchable text
2170 // includes the whitespace between the names.
2171 if !name_ranges.is_empty() {
2172 start -= 1;
2173 }
2174
2175 name_ranges.push(start..end);
2176 }
2177
2178 let mut offset = range.start;
2179 chunks.seek(offset);
2180 while let Some(mut chunk) = chunks.next() {
2181 if chunk.text.len() > range.end - offset {
2182 chunk.text = &chunk.text[0..(range.end - offset)];
2183 offset = range.end;
2184 } else {
2185 offset += chunk.text.len();
2186 }
2187 let style = chunk
2188 .highlight_id
2189 .zip(theme)
2190 .and_then(|(highlight, theme)| highlight.style(theme));
2191 if let Some(style) = style {
2192 let start = text.len();
2193 let end = start + chunk.text.len();
2194 highlight_ranges.push((start..end, style));
2195 }
2196 text.push_str(chunk.text);
2197 if offset >= range.end {
2198 break;
2199 }
2200 }
2201 }
2202
2203 while stack.last().map_or(false, |prev_range| {
2204 !prev_range.contains(&range.start) || !prev_range.contains(&range.end)
2205 }) {
2206 stack.pop();
2207 }
2208 stack.push(range.clone());
2209
2210 Some(OutlineItem {
2211 depth: stack.len() - 1,
2212 range: self.anchor_after(range.start)..self.anchor_before(range.end),
2213 text,
2214 highlight_ranges,
2215 name_ranges,
2216 })
2217 })
2218 .collect::<Vec<_>>();
2219
2220 if items.is_empty() {
2221 None
2222 } else {
2223 Some(Outline::new(items))
2224 }
2225 }
2226
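/// Returns the byte ranges of the innermost pair of brackets enclosing the given
/// range, according to the grammar's brackets query. "Innermost" means the pair
/// whose open-to-close span is shortest.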
2227 pub fn enclosing_bracket_ranges<T: ToOffset>(
2228 &self,
2229 range: Range<T>,
2230 ) -> Option<(Range<usize>, Range<usize>)> {
2231 let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
2232 let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
2233 let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
2234
2235 // Find bracket pairs that *inclusively* contain the given range.
2236 let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
2237 let mut cursor = QueryCursorHandle::new();
2238 let matches = cursor.set_byte_range(range).matches(
2239 &grammar.brackets_query,
2240 tree.root_node(),
2241 TextProvider(self.as_rope()),
2242 );
2243
2244 // Get the ranges of the innermost pair of brackets.
2245 matches
2246 .filter_map(|mat| {
2247 let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
2248 let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
2249 Some((open.byte_range(), close.byte_range()))
2250 })
2251 .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
2252 }
2259
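/// Iterates over the selection sets of other replicas (i.e. collaborators) whose
/// selections overlap the given range, keyed by replica id. The local replica's
/// own selections and empty sets are skipped; each set is binary-searched so only
/// the overlapping slice of selections is yielded.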
2260 pub fn remote_selections_in_range<'a>(
2261 &'a self,
2262 range: Range<Anchor>,
2263 ) -> impl 'a + Iterator<Item = (ReplicaId, impl 'a + Iterator<Item = &'a Selection<Anchor>>)>
2264 {
2265 self.remote_selections
2266 .iter()
2267 .filter(|(replica_id, set)| {
2268 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2269 })
2270 .map(move |(replica_id, set)| {
2271 let start_ix = match set.selections.binary_search_by(|probe| {
2272 probe
2273 .end
2274 .cmp(&range.start, self)
2275 .unwrap()
2276 .then(Ordering::Greater)
2277 }) {
2278 Ok(ix) | Err(ix) => ix,
2279 };
2280 let end_ix = match set.selections.binary_search_by(|probe| {
2281 probe
2282 .start
2283 .cmp(&range.end, self)
2284 .unwrap()
2285 .then(Ordering::Less)
2286 }) {
2287 Ok(ix) | Err(ix) => ix,
2288 };
2289
2290 (*replica_id, set.selections[start_ix..end_ix].iter())
2291 })
2292 }
2293
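/// Iterates over the diagnostics whose ranges overlap `search_range`, resolving
/// each entry's anchors into the requested coordinate type `O` (e.g. `usize`
/// offsets, as used by `chunks`).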
2294 pub fn diagnostics_in_range<'a, T, O>(
2295 &'a self,
2296 search_range: Range<T>,
2297 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2298 where
2299 T: 'a + Clone + ToOffset,
2300 O: 'a + FromAnchor,
2301 {
2302 self.diagnostics.range(search_range.clone(), self, true)
2303 }
2304
2305 pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
2306 let mut groups = Vec::new();
2307 self.diagnostics.groups(&mut groups, self);
2308 groups
2309 }
2310
2311 pub fn diagnostic_group<'a, O>(
2312 &'a self,
2313 group_id: usize,
2314 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2315 where
2316 O: 'a + FromAnchor,
2317 {
2318 self.diagnostics.group(group_id, self)
2319 }
2320
2321 pub fn diagnostics_update_count(&self) -> usize {
2322 self.diagnostics_update_count
2323 }
2324
2325 pub fn parse_count(&self) -> usize {
2326 self.parse_count
2327 }
2328
2329 pub fn selections_update_count(&self) -> usize {
2330 self.selections_update_count
2331 }
2332
2333 pub fn path(&self) -> Option<&Arc<Path>> {
2334 self.path.as_ref()
2335 }
2336
2337 pub fn file_update_count(&self) -> usize {
2338 self.file_update_count
2339 }
2340}
2341
2342impl Clone for BufferSnapshot {
2343 fn clone(&self) -> Self {
2344 Self {
2345 text: self.text.clone(),
2346 tree: self.tree.clone(),
2347 path: self.path.clone(),
2348 remote_selections: self.remote_selections.clone(),
2349 diagnostics: self.diagnostics.clone(),
2350 selections_update_count: self.selections_update_count,
2351 diagnostics_update_count: self.diagnostics_update_count,
2352 file_update_count: self.file_update_count,
2353 is_parsing: self.is_parsing,
2354 language: self.language.clone(),
2355 parse_count: self.parse_count,
2356 }
2357 }
2358}
2359
2360impl Deref for BufferSnapshot {
2361 type Target = text::BufferSnapshot;
2362
2363 fn deref(&self) -> &Self::Target {
2364 &self.text
2365 }
2366}
2367
2368impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
2369 type I = ByteChunks<'a>;
2370
2371 fn text(&mut self, node: tree_sitter::Node) -> Self::I {
2372 ByteChunks(self.0.chunks_in_range(node.byte_range()))
2373 }
2374}
2375
2376pub(crate) struct ByteChunks<'a>(rope::Chunks<'a>);
2377
2378impl<'a> Iterator for ByteChunks<'a> {
2379 type Item = &'a [u8];
2380
2381 fn next(&mut self) -> Option<Self::Item> {
2382 self.0.next().map(str::as_bytes)
2383 }
2384}
2385
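// Note: `Send` is asserted manually here because BufferChunks holds Tree-sitter
// capture state (set up in `BufferChunks::new`) that is not auto-`Send`; the
// assumption is that the iterator is only ever moved between threads, never
// shared.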
2386unsafe impl<'a> Send for BufferChunks<'a> {}
2387
2388impl<'a> BufferChunks<'a> {
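/// Creates a chunk iterator over `range`. When both a grammar and a parse tree
/// are provided, a pooled query cursor streams syntax-highlight captures
/// alongside the text. `diagnostic_endpoints` is expected to be sorted by offset,
/// as done in `BufferSnapshot::chunks`.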
2389 pub(crate) fn new(
2390 text: &'a Rope,
2391 range: Range<usize>,
2392 tree: Option<&'a Tree>,
2393 grammar: Option<&'a Arc<Grammar>>,
2394 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
2395 ) -> Self {
2396 let mut highlights = None;
2397 if let Some((grammar, tree)) = grammar.zip(tree) {
2398 let mut query_cursor = QueryCursorHandle::new();
2399
2400 // TODO - add a Tree-sitter API to remove the need for this.
2401 let cursor = unsafe {
2402 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
2403 };
2404 let captures = cursor.set_byte_range(range.clone()).captures(
2405 &grammar.highlights_query,
2406 tree.root_node(),
2407 TextProvider(text),
2408 );
2409 highlights = Some(BufferChunkHighlights {
2410 captures,
2411 next_capture: None,
2412 stack: Default::default(),
2413 highlight_map: grammar.highlight_map(),
2414 _query_cursor: query_cursor,
2415 })
2416 }
2417
2418 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2419 let chunks = text.chunks_in_range(range.clone());
2420
2421 BufferChunks {
2422 range,
2423 chunks,
2424 diagnostic_endpoints,
2425 error_depth: 0,
2426 warning_depth: 0,
2427 information_depth: 0,
2428 hint_depth: 0,
2429 highlights,
2430 }
2431 }
2432
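/// Repositions the iterator at `offset`, dropping any highlight captures that end
/// at or before the new position and re-scoping the underlying capture query to
/// the remaining byte range.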
2433 pub fn seek(&mut self, offset: usize) {
2434 self.range.start = offset;
2435 self.chunks.seek(self.range.start);
2436 if let Some(highlights) = self.highlights.as_mut() {
2437 highlights
2438 .stack
2439 .retain(|(end_offset, _)| *end_offset > offset);
2440 if let Some((mat, capture_ix)) = &highlights.next_capture {
2441 let capture = mat.captures[*capture_ix as usize];
2442 if offset >= capture.node.start_byte() {
2443 let next_capture_end = capture.node.end_byte();
2444 if offset < next_capture_end {
2445 highlights.stack.push((
2446 next_capture_end,
2447 highlights.highlight_map.get(capture.index),
2448 ));
2449 }
2450 highlights.next_capture.take();
2451 }
2452 }
2453 highlights.captures.set_byte_range(self.range.clone());
2454 }
2455 }
2456
2457 pub fn offset(&self) -> usize {
2458 self.range.start
2459 }
2460
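/// Tracks how many diagnostics of each severity currently cover the iterator's
/// position: each start endpoint increments the matching depth counter and each
/// end endpoint decrements it, so overlapping diagnostics nest correctly.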
2461 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2462 let depth = match endpoint.severity {
2463 DiagnosticSeverity::ERROR => &mut self.error_depth,
2464 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2465 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2466 DiagnosticSeverity::HINT => &mut self.hint_depth,
2467 _ => return,
2468 };
2469 if endpoint.is_start {
2470 *depth += 1;
2471 } else {
2472 *depth -= 1;
2473 }
2474 }
2475
2476 fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
2477 if self.error_depth > 0 {
2478 Some(DiagnosticSeverity::ERROR)
2479 } else if self.warning_depth > 0 {
2480 Some(DiagnosticSeverity::WARNING)
2481 } else if self.information_depth > 0 {
2482 Some(DiagnosticSeverity::INFORMATION)
2483 } else if self.hint_depth > 0 {
2484 Some(DiagnosticSeverity::HINT)
2485 } else {
2486 None
2487 }
2488 }
2489}
2490
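// Each emitted chunk ends at the nearest of: the end of the current rope chunk,
// the start of the next highlight capture, the end of the innermost enclosing
// capture, or the next diagnostic endpoint. This keeps a single highlight id and
// diagnostic severity valid for the whole chunk.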
2491impl<'a> Iterator for BufferChunks<'a> {
2492 type Item = Chunk<'a>;
2493
2494 fn next(&mut self) -> Option<Self::Item> {
2495 let mut next_capture_start = usize::MAX;
2496 let mut next_diagnostic_endpoint = usize::MAX;
2497
2498 if let Some(highlights) = self.highlights.as_mut() {
2499 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2500 if *parent_capture_end <= self.range.start {
2501 highlights.stack.pop();
2502 } else {
2503 break;
2504 }
2505 }
2506
2507 if highlights.next_capture.is_none() {
2508 highlights.next_capture = highlights.captures.next();
2509 }
2510
2511 while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
2512 let capture = mat.captures[*capture_ix as usize];
2513 if self.range.start < capture.node.start_byte() {
2514 next_capture_start = capture.node.start_byte();
2515 break;
2516 } else {
2517 let highlight_id = highlights.highlight_map.get(capture.index);
2518 highlights
2519 .stack
2520 .push((capture.node.end_byte(), highlight_id));
2521 highlights.next_capture = highlights.captures.next();
2522 }
2523 }
2524 }
2525
2526 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
2527 if endpoint.offset <= self.range.start {
2528 self.update_diagnostic_depths(endpoint);
2529 self.diagnostic_endpoints.next();
2530 } else {
2531 next_diagnostic_endpoint = endpoint.offset;
2532 break;
2533 }
2534 }
2535
2536 if let Some(chunk) = self.chunks.peek() {
2537 let chunk_start = self.range.start;
2538 let mut chunk_end = (self.chunks.offset() + chunk.len())
2539 .min(next_capture_start)
2540 .min(next_diagnostic_endpoint);
2541 let mut highlight_id = None;
2542 if let Some(highlights) = self.highlights.as_ref() {
2543 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
2544 chunk_end = chunk_end.min(*parent_capture_end);
2545 highlight_id = Some(*parent_highlight_id);
2546 }
2547 }
2548
2549 let slice =
2550 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2551 self.range.start = chunk_end;
2552 if self.range.start == self.chunks.offset() + chunk.len() {
2553 self.chunks.next().unwrap();
2554 }
2555
2556 Some(Chunk {
2557 text: slice,
2558 highlight_id,
2559 diagnostic: self.current_diagnostic_severity(),
2560 })
2561 } else {
2562 None
2563 }
2564 }
2565}
2566
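// Query cursors are reused via the global QUERY_CURSORS pool: `new` pops an
// existing cursor (or creates one), and `Drop` resets its byte and point ranges
// before returning it to the pool.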
2567impl QueryCursorHandle {
2568 pub(crate) fn new() -> Self {
2569 QueryCursorHandle(Some(
2570 QUERY_CURSORS
2571 .lock()
2572 .pop()
2573 .unwrap_or_else(|| QueryCursor::new()),
2574 ))
2575 }
2576}
2577
2578impl Deref for QueryCursorHandle {
2579 type Target = QueryCursor;
2580
2581 fn deref(&self) -> &Self::Target {
2582 self.0.as_ref().unwrap()
2583 }
2584}
2585
2586impl DerefMut for QueryCursorHandle {
2587 fn deref_mut(&mut self) -> &mut Self::Target {
2588 self.0.as_mut().unwrap()
2589 }
2590}
2591
2592impl Drop for QueryCursorHandle {
2593 fn drop(&mut self) {
2594 let mut cursor = self.0.take().unwrap();
2595 cursor.set_byte_range(0..usize::MAX);
2596 cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
2597 QUERY_CURSORS.lock().push(cursor)
2598 }
2599}
2600
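// Conversions between the editor's `Point` (u32 rows/columns) and
// `tree_sitter::Point` (usize rows/columns).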
2601trait ToTreeSitterPoint {
2602 fn to_ts_point(self) -> tree_sitter::Point;
2603 fn from_ts_point(point: tree_sitter::Point) -> Self;
2604}
2605
2606impl ToTreeSitterPoint for Point {
2607 fn to_ts_point(self) -> tree_sitter::Point {
2608 tree_sitter::Point::new(self.row as usize, self.column as usize)
2609 }
2610
2611 fn from_ts_point(point: tree_sitter::Point) -> Self {
2612 Point::new(point.row as u32, point.column as u32)
2613 }
2614}
2615
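// Only non-buffer operations (diagnostics, selections, completion triggers) are
// deferred through the operation queue; buffer operations are handled by the
// underlying text buffer, so the `Buffer` arm below is unreachable.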
2616impl operation_queue::Operation for Operation {
2617 fn lamport_timestamp(&self) -> clock::Lamport {
2618 match self {
2619 Operation::Buffer(_) => {
2620 unreachable!("buffer operations should never be deferred at this layer")
2621 }
2622 Operation::UpdateDiagnostics {
2623 lamport_timestamp, ..
2624 }
2625 | Operation::UpdateSelections {
2626 lamport_timestamp, ..
2627 }
2628 | Operation::UpdateCompletionTriggers {
2629 lamport_timestamp, ..
2630 } => *lamport_timestamp,
2631 }
2632 }
2633}
2634
2635impl LanguageServerState {
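/// Looks up the buffer snapshot corresponding to the given document version,
/// discarding snapshots more than OLD_VERSIONS_TO_RETAIN versions older than the
/// requested one. Returns an error if the requested version is no longer
/// retained.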
2636 fn snapshot_for_version(&mut self, version: usize) -> Result<&text::BufferSnapshot> {
2637 const OLD_VERSIONS_TO_RETAIN: usize = 10;
2638
2639 self.pending_snapshots
2640 .retain(|&v, _| v + OLD_VERSIONS_TO_RETAIN >= version);
2641 let snapshot = self
2642 .pending_snapshots
2643 .get(&version)
2644 .ok_or_else(|| anyhow!("missing snapshot"))?;
2645 Ok(&snapshot.buffer_snapshot)
2646 }
2647}
2648
2649impl Default for Diagnostic {
2650 fn default() -> Self {
2651 Self {
2652 code: Default::default(),
2653 severity: DiagnosticSeverity::ERROR,
2654 message: Default::default(),
2655 group_id: Default::default(),
2656 is_primary: Default::default(),
2657 is_valid: true,
2658 is_disk_based: false,
2659 }
2660 }
2661}
2662
2663impl Completion {
2664 pub fn sort_key(&self) -> (usize, &str) {
2665 let kind_key = match self.lsp_completion.kind {
2666 Some(lsp::CompletionItemKind::VARIABLE) => 0,
2667 _ => 1,
2668 };
2669 (kind_key, &self.label.text[self.label.filter_range.clone()])
2670 }
2671
2672 pub fn is_snippet(&self) -> bool {
2673 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
2674 }
2675}
2676
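/// Groups an ascending sequence of `u32` values into contiguous ranges, splitting
/// any run that would exceed `max_len` values.
///
/// A small illustrative sketch (values assumed to be sorted and deduplicated):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5].into_iter(), 2).collect();
/// assert_eq!(ranges, [1..3, 3..4, 5..6]);
/// ```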
2677pub fn contiguous_ranges(
2678 values: impl Iterator<Item = u32>,
2679 max_len: usize,
2680) -> impl Iterator<Item = Range<u32>> {
2681 let mut values = values.into_iter();
2682 let mut current_range: Option<Range<u32>> = None;
2683 std::iter::from_fn(move || loop {
2684 if let Some(value) = values.next() {
2685 if let Some(range) = &mut current_range {
2686 if value == range.end && range.len() < max_len {
2687 range.end += 1;
2688 continue;
2689 }
2690 }
2691
2692 let prev_range = current_range.clone();
2693 current_range = Some(value..(value + 1));
2694 if prev_range.is_some() {
2695 return prev_range;
2696 }
2697 } else {
2698 return current_range.take();
2699 }
2700 })
2701}
2702
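/// Classifies a character for word-boundary purposes: newline, other whitespace,
/// word characters (alphanumerics and `_`), or punctuation.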
2703pub fn char_kind(c: char) -> CharKind {
2704 if c == '\n' {
2705 CharKind::Newline
2706 } else if c.is_whitespace() {
2707 CharKind::Whitespace
2708 } else if c.is_alphanumeric() || c == '_' {
2709 CharKind::Word
2710 } else {
2711 CharKind::Punctuation
2712 }
2713}