1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
5 PLAIN_TEXT,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 outline::OutlineItem,
10 range_from_lsp, Outline, ToLspPosition,
11};
12use anyhow::{anyhow, Result};
13use clock::ReplicaId;
14use futures::FutureExt as _;
15use gpui::{AppContext, Entity, ModelContext, MutableAppContext, Task};
16use lazy_static::lazy_static;
17use lsp::LanguageServer;
18use parking_lot::Mutex;
19use postage::{prelude::Stream, sink::Sink, watch};
20use similar::{ChangeTag, TextDiff};
21use smol::future::yield_now;
22use std::{
23 any::Any,
24 cell::RefCell,
25 cmp::{self, Ordering},
26 collections::{BTreeMap, HashMap},
27 ffi::OsString,
28 future::Future,
29 iter::{Iterator, Peekable},
30 ops::{Deref, DerefMut, Range, Sub},
31 path::{Path, PathBuf},
32 str,
33 sync::Arc,
34 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
35 vec,
36};
37use sum_tree::TreeMap;
38use text::{operation_queue::OperationQueue, rope::TextDimension};
39pub use text::{Buffer as TextBuffer, Operation as _, *};
40use theme::SyntaxTheme;
41use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
42use util::{post_inc, TryFutureExt as _};
43
44#[cfg(any(test, feature = "test-support"))]
45pub use tree_sitter_rust;
46
47pub use lsp::DiagnosticSeverity;
48
49thread_local! {
50 static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
51}
52
53lazy_static! {
54 static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
55}
56
57// TODO - Make this configurable
58const INDENT_SIZE: u32 = 4;
59
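/// A buffer of text that combines the replicated [`TextBuffer`] with file metadata, tree-sitter
/// syntax parsing, autoindentation, diagnostics, remote selections, and an optional language
/// server connection.
///
/// A minimal construction sketch (hedged: assumes a gpui test where `cx` is a
/// `&mut MutableAppContext`):
/// ```ignore
/// let buffer = cx.add_model(|cx| Buffer::new(0, "hello world", cx));
/// ```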
60pub struct Buffer {
61 text: TextBuffer,
62 file: Option<Box<dyn File>>,
63 saved_version: clock::Global,
64 saved_mtime: SystemTime,
65 language: Option<Arc<Language>>,
66 autoindent_requests: Vec<Arc<AutoindentRequest>>,
67 pending_autoindent: Option<Task<()>>,
68 sync_parse_timeout: Duration,
69 syntax_tree: Mutex<Option<SyntaxTree>>,
70 parsing_in_background: bool,
71 parse_count: usize,
72 diagnostics: DiagnosticSet,
73 remote_selections: TreeMap<ReplicaId, SelectionSet>,
74 selections_update_count: usize,
75 diagnostics_update_count: usize,
76 language_server: Option<LanguageServerState>,
77 completion_triggers: Vec<String>,
78 deferred_ops: OperationQueue<Operation>,
79 #[cfg(test)]
80 pub(crate) operations: Vec<Operation>,
81}
82
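/// An immutable view of a [`Buffer`]'s text, syntax tree, diagnostics, and remote selections,
/// captured at a point in time so it can be read independently of further edits.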
83pub struct BufferSnapshot {
84 text: text::BufferSnapshot,
85 tree: Option<Tree>,
86 diagnostics: DiagnosticSet,
87 diagnostics_update_count: usize,
88 remote_selections: TreeMap<ReplicaId, SelectionSet>,
89 selections_update_count: usize,
90 is_parsing: bool,
91 language: Option<Arc<Language>>,
92 parse_count: usize,
93}
94
95#[derive(Clone, Debug)]
96struct SelectionSet {
97 selections: Arc<[Selection<Anchor>]>,
98 lamport_timestamp: clock::Lamport,
99}
100
101#[derive(Clone, Debug, PartialEq, Eq)]
102pub struct GroupId {
103 source: Arc<str>,
104 id: usize,
105}
106
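/// A single diagnostic attached to this buffer. `group_id` ties together related entries (e.g. a
/// primary error and its associated notes), `is_primary` marks the main entry of such a group,
/// and `is_disk_based` indicates that the diagnostic's positions refer to the file's contents on
/// disk rather than the buffer's current contents.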
107#[derive(Clone, Debug, PartialEq, Eq)]
108pub struct Diagnostic {
109 pub code: Option<String>,
110 pub severity: DiagnosticSeverity,
111 pub message: String,
112 pub group_id: usize,
113 pub is_valid: bool,
114 pub is_primary: bool,
115 pub is_disk_based: bool,
116}
117
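/// A completion item, pairing the buffer range and replacement text derived from the language
/// server's text edit with the original `lsp::CompletionItem` and an optional display label.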
118#[derive(Clone, Debug)]
119pub struct Completion<T> {
120 pub old_range: Range<T>,
121 pub new_text: String,
122 pub label: Option<String>,
123 pub lsp_completion: lsp::CompletionItem,
124}
125
126struct LanguageServerState {
127 server: Arc<LanguageServer>,
128 latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
129 pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
130 next_version: usize,
131 _maintain_server: Task<()>,
132}
133
134#[derive(Clone)]
135struct LanguageServerSnapshot {
136 buffer_snapshot: text::BufferSnapshot,
137 version: usize,
138 path: Arc<Path>,
139}
140
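/// An operation that can be replicated to other participants editing the same buffer: either an
/// operation on the underlying text, or an update to diagnostics, selections, or completion
/// triggers.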
141#[derive(Clone, Debug)]
142pub enum Operation {
143 Buffer(text::Operation),
144 UpdateDiagnostics {
145 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
146 lamport_timestamp: clock::Lamport,
147 },
148 UpdateSelections {
149 replica_id: ReplicaId,
150 selections: Arc<[Selection<Anchor>]>,
151 lamport_timestamp: clock::Lamport,
152 },
153 UpdateCompletionTriggers {
154 triggers: Vec<String>,
155 },
156}
157
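/// Events emitted by a [`Buffer`] via its `ModelContext`.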
158#[derive(Clone, Debug, Eq, PartialEq)]
159pub enum Event {
160 Edited,
161 Dirtied,
162 Saved,
163 FileHandleChanged,
164 Reloaded,
165 Reparsed,
166 DiagnosticsUpdated,
167 Closed,
168}
169
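/// The file backing a buffer. A `File` may be local (see [`LocalFile`]) or remote, in which case
/// operations such as saving, formatting, and completion requests are forwarded to the file's
/// host.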
170pub trait File {
171 fn as_local(&self) -> Option<&dyn LocalFile>;
172
173 fn is_local(&self) -> bool {
174 self.as_local().is_some()
175 }
176
177 fn mtime(&self) -> SystemTime;
178
179 /// Returns the path of this file relative to the worktree's root directory.
180 fn path(&self) -> &Arc<Path>;
181
182 /// Returns the path of this file relative to the worktree's parent directory (this means it
183 /// includes the name of the worktree's root folder).
184 fn full_path(&self, cx: &AppContext) -> PathBuf;
185
186 /// Returns the last component of this handle's absolute path. If this handle refers to the root
187 /// of its worktree, then this method will return the name of the worktree itself.
188 fn file_name(&self, cx: &AppContext) -> OsString;
189
190 fn is_deleted(&self) -> bool;
191
192 fn save(
193 &self,
194 buffer_id: u64,
195 text: Rope,
196 version: clock::Global,
197 cx: &mut MutableAppContext,
198 ) -> Task<Result<(clock::Global, SystemTime)>>;
199
200 fn format_remote(&self, buffer_id: u64, cx: &mut MutableAppContext)
201 -> Option<Task<Result<()>>>;
202
203 fn completions(
204 &self,
205 buffer_id: u64,
206 position: Anchor,
207 language: Option<Arc<Language>>,
208 cx: &mut MutableAppContext,
209 ) -> Task<Result<Vec<Completion<Anchor>>>>;
210
211 fn apply_additional_edits_for_completion(
212 &self,
213 buffer_id: u64,
214 completion: Completion<Anchor>,
215 cx: &mut MutableAppContext,
216 ) -> Task<Result<Vec<clock::Local>>>;
217
218 fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);
219
220 fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);
221
222 fn as_any(&self) -> &dyn Any;
223
224 fn to_proto(&self) -> rpc::proto::File;
225}
226
227pub trait LocalFile: File {
228 /// Returns the absolute path of this file.
229 fn abs_path(&self, cx: &AppContext) -> PathBuf;
230
231 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
232
233 fn buffer_reloaded(
234 &self,
235 buffer_id: u64,
236 version: &clock::Global,
237 mtime: SystemTime,
238 cx: &mut MutableAppContext,
239 );
240}
241
242#[cfg(feature = "test-support")]
243pub struct FakeFile {
244 pub path: Arc<Path>,
245}
246
247#[cfg(feature = "test-support")]
248impl File for FakeFile {
249 fn as_local(&self) -> Option<&dyn LocalFile> {
250 Some(self)
251 }
252
253 fn mtime(&self) -> SystemTime {
254 SystemTime::UNIX_EPOCH
255 }
256
257 fn path(&self) -> &Arc<Path> {
258 &self.path
259 }
260
261 fn full_path(&self, _: &AppContext) -> PathBuf {
262 self.path.to_path_buf()
263 }
264
265 fn file_name(&self, _: &AppContext) -> OsString {
266 self.path.file_name().unwrap().to_os_string()
267 }
268
269 fn is_deleted(&self) -> bool {
270 false
271 }
272
273 fn save(
274 &self,
275 _: u64,
276 _: Rope,
277 _: clock::Global,
278 cx: &mut MutableAppContext,
279 ) -> Task<Result<(clock::Global, SystemTime)>> {
280 cx.spawn(|_| async move { Ok((Default::default(), SystemTime::UNIX_EPOCH)) })
281 }
282
283 fn format_remote(&self, _: u64, _: &mut MutableAppContext) -> Option<Task<Result<()>>> {
284 None
285 }
286
287 fn completions(
288 &self,
289 _: u64,
290 _: Anchor,
291 _: Option<Arc<Language>>,
292 _: &mut MutableAppContext,
293 ) -> Task<Result<Vec<Completion<Anchor>>>> {
294 Task::ready(Ok(Default::default()))
295 }
296
297 fn apply_additional_edits_for_completion(
298 &self,
299 _: u64,
300 _: Completion<Anchor>,
301 _: &mut MutableAppContext,
302 ) -> Task<Result<Vec<clock::Local>>> {
303 Task::ready(Ok(Default::default()))
304 }
305
306 fn buffer_updated(&self, _: u64, _: Operation, _: &mut MutableAppContext) {}
307
308 fn buffer_removed(&self, _: u64, _: &mut MutableAppContext) {}
309
310 fn as_any(&self) -> &dyn Any {
311 self
312 }
313
314 fn to_proto(&self) -> rpc::proto::File {
315 unimplemented!()
316 }
317}
318
319#[cfg(feature = "test-support")]
320impl LocalFile for FakeFile {
321 fn abs_path(&self, _: &AppContext) -> PathBuf {
322 self.path.to_path_buf()
323 }
324
325 fn load(&self, cx: &AppContext) -> Task<Result<String>> {
326 cx.background().spawn(async move { Ok(Default::default()) })
327 }
328
329 fn buffer_reloaded(&self, _: u64, _: &clock::Global, _: SystemTime, _: &mut MutableAppContext) {
330 }
331}
332
333pub(crate) struct QueryCursorHandle(Option<QueryCursor>);
334
335#[derive(Clone)]
336struct SyntaxTree {
337 tree: Tree,
338 version: clock::Global,
339}
340
341#[derive(Clone)]
342struct AutoindentRequest {
343 before_edit: BufferSnapshot,
344 edited: Vec<Anchor>,
345 inserted: Option<Vec<Range<Anchor>>>,
346}
347
348#[derive(Debug)]
349struct IndentSuggestion {
350 basis_row: u32,
351 indent: bool,
352}
353
354struct TextProvider<'a>(&'a Rope);
355
356struct BufferChunkHighlights<'a> {
357 captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
358 next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
359 stack: Vec<(usize, HighlightId)>,
360 highlight_map: HighlightMap,
361 _query_cursor: QueryCursorHandle,
362}
363
364pub struct BufferChunks<'a> {
365 range: Range<usize>,
366 chunks: rope::Chunks<'a>,
367 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
368 error_depth: usize,
369 warning_depth: usize,
370 information_depth: usize,
371 hint_depth: usize,
372 highlights: Option<BufferChunkHighlights<'a>>,
373}
374
375#[derive(Clone, Copy, Debug, Default)]
376pub struct Chunk<'a> {
377 pub text: &'a str,
378 pub highlight_id: Option<HighlightId>,
379 pub diagnostic: Option<DiagnosticSeverity>,
380}
381
382pub(crate) struct Diff {
383 base_version: clock::Global,
384 new_text: Arc<str>,
385 changes: Vec<(ChangeTag, usize)>,
386}
387
388#[derive(Clone, Copy)]
389pub(crate) struct DiagnosticEndpoint {
390 offset: usize,
391 is_start: bool,
392 severity: DiagnosticSeverity,
393}
394
395impl Buffer {
396 pub fn new<T: Into<Arc<str>>>(
397 replica_id: ReplicaId,
398 base_text: T,
399 cx: &mut ModelContext<Self>,
400 ) -> Self {
401 Self::build(
402 TextBuffer::new(
403 replica_id,
404 cx.model_id() as u64,
405 History::new(base_text.into()),
406 ),
407 None,
408 )
409 }
410
411 pub fn from_file<T: Into<Arc<str>>>(
412 replica_id: ReplicaId,
413 base_text: T,
414 file: Box<dyn File>,
415 cx: &mut ModelContext<Self>,
416 ) -> Self {
417 Self::build(
418 TextBuffer::new(
419 replica_id,
420 cx.model_id() as u64,
421 History::new(base_text.into()),
422 ),
423 Some(file),
424 )
425 }
426
427 pub fn from_proto(
428 replica_id: ReplicaId,
429 message: proto::BufferState,
430 file: Option<Box<dyn File>>,
431 cx: &mut ModelContext<Self>,
432 ) -> Result<Self> {
433 let fragments_len = message.fragments.len();
434 let buffer = TextBuffer::from_parts(
435 replica_id,
436 message.id,
437 &message.visible_text,
438 &message.deleted_text,
439 message
440 .undo_map
441 .into_iter()
442 .map(proto::deserialize_undo_map_entry),
443 message
444 .fragments
445 .into_iter()
446 .enumerate()
447 .map(|(i, fragment)| {
448 proto::deserialize_buffer_fragment(fragment, i, fragments_len)
449 }),
450 message.lamport_timestamp,
451 From::from(message.version),
452 );
453 let mut this = Self::build(buffer, file);
454 for selection_set in message.selections {
455 this.remote_selections.insert(
456 selection_set.replica_id as ReplicaId,
457 SelectionSet {
458 selections: proto::deserialize_selections(selection_set.selections),
459 lamport_timestamp: clock::Lamport {
460 replica_id: selection_set.replica_id as ReplicaId,
461 value: selection_set.lamport_timestamp,
462 },
463 },
464 );
465 }
466 let snapshot = this.snapshot();
467 let entries = proto::deserialize_diagnostics(message.diagnostics);
468 this.apply_diagnostic_update(
469 DiagnosticSet::from_sorted_entries(entries.into_iter().cloned(), &snapshot),
470 cx,
471 );
472
473 this.completion_triggers = message.completion_triggers;
474
475 let deferred_ops = message
476 .deferred_operations
477 .into_iter()
478 .map(proto::deserialize_operation)
479 .collect::<Result<Vec<_>>>()?;
480 this.apply_ops(deferred_ops, cx)?;
481
482 Ok(this)
483 }
484
485 pub fn to_proto(&self) -> proto::BufferState {
486 proto::BufferState {
487 id: self.remote_id(),
488 file: self.file.as_ref().map(|f| f.to_proto()),
489 visible_text: self.text.text(),
490 deleted_text: self.text.deleted_text(),
491 undo_map: self
492 .text
493 .undo_history()
494 .map(proto::serialize_undo_map_entry)
495 .collect(),
496 version: From::from(&self.version),
497 lamport_timestamp: self.lamport_clock.value,
498 fragments: self
499 .text
500 .fragments()
501 .map(proto::serialize_buffer_fragment)
502 .collect(),
503 selections: self
504 .remote_selections
505 .iter()
506 .map(|(replica_id, set)| proto::SelectionSet {
507 replica_id: *replica_id as u32,
508 selections: proto::serialize_selections(&set.selections),
509 lamport_timestamp: set.lamport_timestamp.value,
510 })
511 .collect(),
512 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
513 deferred_operations: self
514 .deferred_ops
515 .iter()
516 .map(proto::serialize_operation)
517 .chain(
518 self.text
519 .deferred_ops()
520 .map(|op| proto::serialize_operation(&Operation::Buffer(op.clone()))),
521 )
522 .collect(),
523 completion_triggers: self.completion_triggers.clone(),
524 }
525 }
526
527 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
528 self.set_language(Some(language), cx);
529 self
530 }
531
532 pub fn with_language_server(
533 mut self,
534 server: Arc<LanguageServer>,
535 cx: &mut ModelContext<Self>,
536 ) -> Self {
537 self.set_language_server(Some(server), cx);
538 self
539 }
540
541 fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
        let saved_mtime = file.as_ref().map_or(UNIX_EPOCH, |file| file.mtime());
548
549 Self {
550 saved_mtime,
551 saved_version: buffer.version(),
552 text: buffer,
553 file,
554 syntax_tree: Mutex::new(None),
555 parsing_in_background: false,
556 parse_count: 0,
557 sync_parse_timeout: Duration::from_millis(1),
558 autoindent_requests: Default::default(),
559 pending_autoindent: Default::default(),
560 language: None,
561 remote_selections: Default::default(),
562 selections_update_count: 0,
563 diagnostics: Default::default(),
564 diagnostics_update_count: 0,
565 language_server: None,
566 completion_triggers: Default::default(),
567 deferred_ops: OperationQueue::new(),
568 #[cfg(test)]
569 operations: Default::default(),
570 }
571 }
572
573 pub fn snapshot(&self) -> BufferSnapshot {
574 BufferSnapshot {
575 text: self.text.snapshot(),
576 tree: self.syntax_tree(),
577 remote_selections: self.remote_selections.clone(),
578 diagnostics: self.diagnostics.clone(),
579 diagnostics_update_count: self.diagnostics_update_count,
580 is_parsing: self.parsing_in_background,
581 language: self.language.clone(),
582 parse_count: self.parse_count,
583 selections_update_count: self.selections_update_count,
584 }
585 }
586
587 pub fn file(&self) -> Option<&dyn File> {
588 self.file.as_deref()
589 }
590
591 pub fn format(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
592 let file = if let Some(file) = self.file.as_ref() {
593 file
594 } else {
595 return Task::ready(Err(anyhow!("buffer has no file")));
596 };
597
598 if let Some(LanguageServerState { server, .. }) = self.language_server.as_ref() {
599 let server = server.clone();
600 let abs_path = file.as_local().unwrap().abs_path(cx);
601 let version = self.version();
602 cx.spawn(|this, mut cx| async move {
603 let edits = server
604 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
605 text_document: lsp::TextDocumentIdentifier::new(
606 lsp::Url::from_file_path(&abs_path).unwrap(),
607 ),
608 options: Default::default(),
609 work_done_progress_params: Default::default(),
610 })
611 .await?;
612
613 if let Some(edits) = edits {
614 this.update(&mut cx, |this, cx| {
615 if this.version == version {
616 this.apply_lsp_edits(edits, cx)?;
617 Ok(())
618 } else {
619 Err(anyhow!("buffer edited since starting to format"))
620 }
621 })
622 } else {
623 Ok(())
624 }
625 })
626 } else {
627 let format = file.format_remote(self.remote_id(), cx.as_mut());
628 cx.spawn(|_, _| async move {
629 if let Some(format) = format {
630 format.await?;
631 }
632 Ok(())
633 })
634 }
635 }
636
637 pub fn save(
638 &mut self,
639 cx: &mut ModelContext<Self>,
640 ) -> Task<Result<(clock::Global, SystemTime)>> {
641 let file = if let Some(file) = self.file.as_ref() {
642 file
643 } else {
644 return Task::ready(Err(anyhow!("buffer has no file")));
645 };
646 let text = self.as_rope().clone();
647 let version = self.version();
648 let save = file.save(self.remote_id(), text, version, cx.as_mut());
649 cx.spawn(|this, mut cx| async move {
650 let (version, mtime) = save.await?;
651 this.update(&mut cx, |this, cx| {
652 this.did_save(version.clone(), mtime, None, cx);
653 });
654 Ok((version, mtime))
655 })
656 }
657
658 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
659 self.language = language;
660 self.reparse(cx);
661 }
662
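    /// Associates this buffer with a language server. This spawns a background task that keeps
    /// the server informed of the buffer's contents: the first snapshot is sent as a
    /// `textDocument/didOpen` notification and subsequent snapshots as incremental
    /// `textDocument/didChange` notifications. The server's completion trigger characters are
    /// also captured once its capabilities become available.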
663 pub fn set_language_server(
664 &mut self,
665 language_server: Option<Arc<lsp::LanguageServer>>,
666 cx: &mut ModelContext<Self>,
667 ) {
668 self.language_server = if let Some(server) = language_server {
669 let (latest_snapshot_tx, mut latest_snapshot_rx) =
670 watch::channel::<Option<LanguageServerSnapshot>>();
671
672 let maintain_changes = cx.background().spawn({
673 let server = server.clone();
674 async move {
675 let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
676 while let Some(snapshot) = latest_snapshot_rx.recv().await {
677 if let Some(snapshot) = snapshot {
678 let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
679 if let Some(prev_snapshot) = prev_snapshot {
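                            // Convert the edits made since the previously synced snapshot into
                            // LSP content changes. Positions are expressed in UTF-16 code units
                            // (`PointUtf16`), as required by the LSP protocol.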
680 let changes = lsp::DidChangeTextDocumentParams {
681 text_document: lsp::VersionedTextDocumentIdentifier::new(
682 uri,
683 snapshot.version as i32,
684 ),
685 content_changes: snapshot
686 .buffer_snapshot
687 .edits_since::<(PointUtf16, usize)>(
688 prev_snapshot.buffer_snapshot.version(),
689 )
690 .map(|edit| {
691 let edit_start = edit.new.start.0;
692 let edit_end =
693 edit_start + (edit.old.end.0 - edit.old.start.0);
694 let new_text = snapshot
695 .buffer_snapshot
696 .text_for_range(edit.new.start.1..edit.new.end.1)
697 .collect();
698 lsp::TextDocumentContentChangeEvent {
699 range: Some(lsp::Range::new(
700 edit_start.to_lsp_position(),
701 edit_end.to_lsp_position(),
702 )),
703 range_length: None,
704 text: new_text,
705 }
706 })
707 .collect(),
708 };
709 server
710 .notify::<lsp::notification::DidChangeTextDocument>(changes)
711 .await?;
712 } else {
713 server
714 .notify::<lsp::notification::DidOpenTextDocument>(
715 lsp::DidOpenTextDocumentParams {
716 text_document: lsp::TextDocumentItem::new(
717 uri,
718 Default::default(),
719 snapshot.version as i32,
720 snapshot.buffer_snapshot.text().to_string(),
721 ),
722 },
723 )
724 .await?;
725 }
726
727 prev_snapshot = Some(snapshot);
728 }
729 }
730 Ok(())
731 }
732 });
733
734 Some(LanguageServerState {
735 latest_snapshot: latest_snapshot_tx,
736 pending_snapshots: Default::default(),
737 next_version: 0,
738 server: server.clone(),
739 _maintain_server: cx.spawn_weak(|this, mut cx| async move {
740 let mut capabilities = server.capabilities();
741 loop {
742 if let Some(capabilities) = capabilities.recv().await.flatten() {
743 if let Some(this) = this.upgrade(&cx) {
744 let triggers = capabilities
745 .completion_provider
746 .and_then(|c| c.trigger_characters)
747 .unwrap_or_default();
748 this.update(&mut cx, |this, cx| {
749 this.completion_triggers = triggers.clone();
750 this.send_operation(
751 Operation::UpdateCompletionTriggers { triggers },
752 cx,
753 );
754 });
755 } else {
756 return;
757 }
758
759 break;
760 }
761 }
762
763 maintain_changes.log_err().await;
764 }),
765 })
766 } else {
767 None
768 };
769
770 self.update_language_server(cx);
771 }
772
773 pub fn did_save(
774 &mut self,
775 version: clock::Global,
776 mtime: SystemTime,
777 new_file: Option<Box<dyn File>>,
778 cx: &mut ModelContext<Self>,
779 ) {
780 self.saved_mtime = mtime;
781 self.saved_version = version;
782 if let Some(new_file) = new_file {
783 self.file = Some(new_file);
784 }
785 if let Some((state, local_file)) = &self
786 .language_server
787 .as_ref()
788 .zip(self.file.as_ref().and_then(|f| f.as_local()))
789 {
790 cx.background()
791 .spawn(
792 state
793 .server
794 .notify::<lsp::notification::DidSaveTextDocument>(
795 lsp::DidSaveTextDocumentParams {
796 text_document: lsp::TextDocumentIdentifier {
797 uri: lsp::Url::from_file_path(local_file.abs_path(cx)).unwrap(),
798 },
799 text: None,
800 },
801 ),
802 )
803 .detach()
804 }
805 cx.emit(Event::Saved);
806 }
807
808 pub fn did_reload(
809 &mut self,
810 version: clock::Global,
811 mtime: SystemTime,
812 cx: &mut ModelContext<Self>,
813 ) {
814 self.saved_mtime = mtime;
815 self.saved_version = version;
816 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
817 file.buffer_reloaded(self.remote_id(), &self.saved_version, self.saved_mtime, cx);
818 }
819 cx.emit(Event::Reloaded);
820 cx.notify();
821 }
822
823 pub fn file_updated(
824 &mut self,
825 new_file: Box<dyn File>,
826 cx: &mut ModelContext<Self>,
827 ) -> Task<()> {
828 let old_file = if let Some(file) = self.file.as_ref() {
829 file
830 } else {
831 return Task::ready(());
832 };
833 let mut file_changed = false;
834 let mut task = Task::ready(());
835
836 if new_file.path() != old_file.path() {
837 file_changed = true;
838 }
839
840 if new_file.is_deleted() {
841 if !old_file.is_deleted() {
842 file_changed = true;
843 if !self.is_dirty() {
844 cx.emit(Event::Dirtied);
845 }
846 }
847 } else {
848 let new_mtime = new_file.mtime();
849 if new_mtime != old_file.mtime() {
850 file_changed = true;
851
852 if !self.is_dirty() {
853 task = cx.spawn(|this, mut cx| {
854 async move {
855 let new_text = this.read_with(&cx, |this, cx| {
856 this.file
857 .as_ref()
858 .and_then(|file| file.as_local().map(|f| f.load(cx)))
859 });
860 if let Some(new_text) = new_text {
861 let new_text = new_text.await?;
862 let diff = this
863 .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
864 .await;
865 this.update(&mut cx, |this, cx| {
866 if this.apply_diff(diff, cx) {
867 this.did_reload(this.version(), new_mtime, cx);
868 }
869 });
870 }
871 Ok(())
872 }
873 .log_err()
874 .map(drop)
875 });
876 }
877 }
878 }
879
880 if file_changed {
881 cx.emit(Event::FileHandleChanged);
882 }
883 self.file = Some(new_file);
884 task
885 }
886
887 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
888 cx.emit(Event::Closed);
889 }
890
891 pub fn language(&self) -> Option<&Arc<Language>> {
892 self.language.as_ref()
893 }
894
895 pub fn language_server(&self) -> Option<&Arc<LanguageServer>> {
896 self.language_server.as_ref().map(|state| &state.server)
897 }
898
899 pub fn parse_count(&self) -> usize {
900 self.parse_count
901 }
902
903 pub fn selections_update_count(&self) -> usize {
904 self.selections_update_count
905 }
906
907 pub fn diagnostics_update_count(&self) -> usize {
908 self.diagnostics_update_count
909 }
910
911 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
912 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
913 self.interpolate_tree(syntax_tree);
914 Some(syntax_tree.tree.clone())
915 } else {
916 None
917 }
918 }
919
920 #[cfg(any(test, feature = "test-support"))]
921 pub fn is_parsing(&self) -> bool {
922 self.parsing_in_background
923 }
924
925 #[cfg(test)]
926 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
927 self.sync_parse_timeout = timeout;
928 }
929
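    /// Reparses the buffer with tree-sitter, first attempting to finish synchronously within
    /// `sync_parse_timeout` and otherwise continuing in the background. Returns whether a parse
    /// completed synchronously. When a background parse finishes, the buffer reparses again if it
    /// was edited or its grammar changed in the meantime.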
930 fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
931 if self.parsing_in_background {
932 return false;
933 }
934
935 if let Some(grammar) = self.grammar().cloned() {
936 let old_tree = self.syntax_tree();
937 let text = self.as_rope().clone();
938 let parsed_version = self.version();
939 let parse_task = cx.background().spawn({
940 let grammar = grammar.clone();
941 async move { Self::parse_text(&text, old_tree, &grammar) }
942 });
943
944 match cx
945 .background()
946 .block_with_timeout(self.sync_parse_timeout, parse_task)
947 {
948 Ok(new_tree) => {
949 self.did_finish_parsing(new_tree, parsed_version, cx);
950 return true;
951 }
952 Err(parse_task) => {
953 self.parsing_in_background = true;
954 cx.spawn(move |this, mut cx| async move {
955 let new_tree = parse_task.await;
956 this.update(&mut cx, move |this, cx| {
957 let grammar_changed = this
958 .grammar()
959 .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
960 let parse_again =
961 this.version.changed_since(&parsed_version) || grammar_changed;
962 this.parsing_in_background = false;
963 this.did_finish_parsing(new_tree, parsed_version, cx);
964
965 if parse_again && this.reparse(cx) {
966 return;
967 }
968 });
969 })
970 .detach();
971 }
972 }
973 }
974 false
975 }
976
977 fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
978 PARSER.with(|parser| {
979 let mut parser = parser.borrow_mut();
980 parser
981 .set_language(grammar.ts_language)
982 .expect("incompatible grammar");
983 let mut chunks = text.chunks_in_range(0..text.len());
984 let tree = parser
985 .parse_with(
986 &mut move |offset, _| {
987 chunks.seek(offset);
988 chunks.next().unwrap_or("").as_bytes()
989 },
990 old_tree.as_ref(),
991 )
992 .unwrap();
993 tree
994 })
995 }
996
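    /// Applies any edits made since `tree.version` to the existing tree-sitter tree, so that its
    /// node positions remain consistent with the current text until the next full reparse.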
997 fn interpolate_tree(&self, tree: &mut SyntaxTree) {
998 for edit in self.edits_since::<(usize, Point)>(&tree.version) {
999 let (bytes, lines) = edit.flatten();
1000 tree.tree.edit(&InputEdit {
1001 start_byte: bytes.new.start,
1002 old_end_byte: bytes.new.start + bytes.old.len(),
1003 new_end_byte: bytes.new.end,
1004 start_position: lines.new.start.to_ts_point(),
1005 old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
1006 .to_ts_point(),
1007 new_end_position: lines.new.end.to_ts_point(),
1008 });
1009 }
1010 tree.version = self.version();
1011 }
1012
1013 fn did_finish_parsing(
1014 &mut self,
1015 tree: Tree,
1016 version: clock::Global,
1017 cx: &mut ModelContext<Self>,
1018 ) {
1019 self.parse_count += 1;
1020 *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
1021 self.request_autoindent(cx);
1022 cx.emit(Event::Reparsed);
1023 cx.notify();
1024 }
1025
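    /// Replaces this buffer's diagnostics. When a document `version` is provided (as reported by
    /// the language server), the diagnostics are interpreted against the pending snapshot that
    /// was sent to the server for that version; disk-based diagnostics are additionally adjusted
    /// to account for edits made since the buffer was last saved.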
1026 pub fn update_diagnostics<T>(
1027 &mut self,
1028 version: Option<i32>,
1029 mut diagnostics: Vec<DiagnosticEntry<T>>,
1030 cx: &mut ModelContext<Self>,
1031 ) -> Result<()>
1032 where
1033 T: Copy + Ord + TextDimension + Sub<Output = T> + Clip + ToPoint,
1034 {
1035 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1036 Ordering::Equal
1037 .then_with(|| b.is_primary.cmp(&a.is_primary))
1038 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1039 .then_with(|| a.severity.cmp(&b.severity))
1040 .then_with(|| a.message.cmp(&b.message))
1041 }
1042
1043 let version = version.map(|version| version as usize);
1044 let content =
1045 if let Some((version, language_server)) = version.zip(self.language_server.as_mut()) {
1046 language_server
1047 .pending_snapshots
1048 .retain(|&v, _| v >= version);
1049 let snapshot = language_server
1050 .pending_snapshots
1051 .get(&version)
1052 .ok_or_else(|| anyhow!("missing snapshot"))?;
1053 &snapshot.buffer_snapshot
1054 } else {
1055 self.deref()
1056 };
1057
1058 diagnostics.sort_unstable_by(|a, b| {
1059 Ordering::Equal
1060 .then_with(|| a.range.start.cmp(&b.range.start))
1061 .then_with(|| b.range.end.cmp(&a.range.end))
1062 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1063 });
1064
1065 let mut sanitized_diagnostics = Vec::new();
1066 let mut edits_since_save = content.edits_since::<T>(&self.saved_version).peekable();
1067 let mut last_edit_old_end = T::default();
1068 let mut last_edit_new_end = T::default();
1069 'outer: for entry in diagnostics {
1070 let mut start = entry.range.start;
1071 let mut end = entry.range.end;
1072
1073 // Some diagnostics are based on files on disk instead of buffers'
1074 // current contents. Adjust these diagnostics' ranges to reflect
1075 // any unsaved edits.
1076 if entry.diagnostic.is_disk_based {
1077 while let Some(edit) = edits_since_save.peek() {
1078 if edit.old.end <= start {
1079 last_edit_old_end = edit.old.end;
1080 last_edit_new_end = edit.new.end;
1081 edits_since_save.next();
1082 } else if edit.old.start <= end && edit.old.end >= start {
1083 continue 'outer;
1084 } else {
1085 break;
1086 }
1087 }
1088
1089 let start_overshoot = start - last_edit_old_end;
1090 start = last_edit_new_end;
1091 start.add_assign(&start_overshoot);
1092
1093 let end_overshoot = end - last_edit_old_end;
1094 end = last_edit_new_end;
1095 end.add_assign(&end_overshoot);
1096 }
1097
1098 let range = start.clip(Bias::Left, content)..end.clip(Bias::Right, content);
1099 let mut range = range.start.to_point(content)..range.end.to_point(content);
1100 // Expand empty ranges by one character
1101 if range.start == range.end {
1102 range.end.column += 1;
1103 range.end = content.clip_point(range.end, Bias::Right);
1104 if range.start == range.end && range.end.column > 0 {
1105 range.start.column -= 1;
1106 range.start = content.clip_point(range.start, Bias::Left);
1107 }
1108 }
1109
1110 sanitized_diagnostics.push(DiagnosticEntry {
1111 range,
1112 diagnostic: entry.diagnostic,
1113 });
1114 }
1115 drop(edits_since_save);
1116
1117 let set = DiagnosticSet::new(sanitized_diagnostics, content);
1118 self.apply_diagnostic_update(set.clone(), cx);
1119
1120 let op = Operation::UpdateDiagnostics {
1121 diagnostics: set.iter().cloned().collect(),
1122 lamport_timestamp: self.text.lamport_clock.tick(),
1123 };
1124 self.send_operation(op, cx);
1125 Ok(())
1126 }
1127
1128 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
1129 if let Some(indent_columns) = self.compute_autoindents() {
1130 let indent_columns = cx.background().spawn(indent_columns);
1131 match cx
1132 .background()
1133 .block_with_timeout(Duration::from_micros(500), indent_columns)
1134 {
1135 Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
1136 Err(indent_columns) => {
1137 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
1138 let indent_columns = indent_columns.await;
1139 this.update(&mut cx, |this, cx| {
1140 this.apply_autoindents(indent_columns, cx);
1141 });
1142 }));
1143 }
1144 }
1145 }
1146 }
1147
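    /// Computes new indentation levels for rows affected by pending autoindent requests. For each
    /// request, it maps edited rows from their pre-edit positions to their current positions,
    /// records the indentation each row was suggested before the edit, then re-derives
    /// suggestions against the current syntax tree and keeps only the rows whose indentation
    /// actually needs to change (plus any freshly inserted rows). The work yields periodically so
    /// it can run on a background thread.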
1148 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
1149 let max_rows_between_yields = 100;
1150 let snapshot = self.snapshot();
1151 if snapshot.language.is_none()
1152 || snapshot.tree.is_none()
1153 || self.autoindent_requests.is_empty()
1154 {
1155 return None;
1156 }
1157
1158 let autoindent_requests = self.autoindent_requests.clone();
1159 Some(async move {
1160 let mut indent_columns = BTreeMap::new();
1161 for request in autoindent_requests {
1162 let old_to_new_rows = request
1163 .edited
1164 .iter()
1165 .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
1166 .zip(
1167 request
1168 .edited
1169 .iter()
1170 .map(|anchor| anchor.summary::<Point>(&snapshot).row),
1171 )
1172 .collect::<BTreeMap<u32, u32>>();
1173
1174 let mut old_suggestions = HashMap::<u32, u32>::default();
1175 let old_edited_ranges =
1176 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1177 for old_edited_range in old_edited_ranges {
1178 let suggestions = request
1179 .before_edit
1180 .suggest_autoindents(old_edited_range.clone())
1181 .into_iter()
1182 .flatten();
1183 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1184 let indentation_basis = old_to_new_rows
1185 .get(&suggestion.basis_row)
1186 .and_then(|from_row| old_suggestions.get(from_row).copied())
1187 .unwrap_or_else(|| {
1188 request
1189 .before_edit
1190 .indent_column_for_line(suggestion.basis_row)
1191 });
1192 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1193 old_suggestions.insert(
1194 *old_to_new_rows.get(&old_row).unwrap(),
1195 indentation_basis + delta,
1196 );
1197 }
1198 yield_now().await;
1199 }
1200
1201 // At this point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the
1202 // buffer before the edit, but keyed by the row for these lines after the edits were applied.
1203 let new_edited_row_ranges =
1204 contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
1205 for new_edited_row_range in new_edited_row_ranges {
1206 let suggestions = snapshot
1207 .suggest_autoindents(new_edited_row_range.clone())
1208 .into_iter()
1209 .flatten();
1210 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1211 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1212 let new_indentation = indent_columns
1213 .get(&suggestion.basis_row)
1214 .copied()
1215 .unwrap_or_else(|| {
1216 snapshot.indent_column_for_line(suggestion.basis_row)
1217 })
1218 + delta;
1219 if old_suggestions
1220 .get(&new_row)
1221 .map_or(true, |old_indentation| new_indentation != *old_indentation)
1222 {
1223 indent_columns.insert(new_row, new_indentation);
1224 }
1225 }
1226 yield_now().await;
1227 }
1228
1229 if let Some(inserted) = request.inserted.as_ref() {
1230 let inserted_row_ranges = contiguous_ranges(
1231 inserted
1232 .iter()
1233 .map(|range| range.to_point(&snapshot))
1234 .flat_map(|range| range.start.row..range.end.row + 1),
1235 max_rows_between_yields,
1236 );
1237 for inserted_row_range in inserted_row_ranges {
1238 let suggestions = snapshot
1239 .suggest_autoindents(inserted_row_range.clone())
1240 .into_iter()
1241 .flatten();
1242 for (row, suggestion) in inserted_row_range.zip(suggestions) {
1243 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
1244 let new_indentation = indent_columns
1245 .get(&suggestion.basis_row)
1246 .copied()
1247 .unwrap_or_else(|| {
1248 snapshot.indent_column_for_line(suggestion.basis_row)
1249 })
1250 + delta;
1251 indent_columns.insert(row, new_indentation);
1252 }
1253 yield_now().await;
1254 }
1255 }
1256 }
1257 indent_columns
1258 })
1259 }
1260
1261 fn apply_autoindents(
1262 &mut self,
1263 indent_columns: BTreeMap<u32, u32>,
1264 cx: &mut ModelContext<Self>,
1265 ) {
1266 self.autoindent_requests.clear();
1267 self.start_transaction();
1268 for (row, indent_column) in &indent_columns {
1269 self.set_indent_column_for_line(*row, *indent_column, cx);
1270 }
1271 self.end_transaction(cx);
1272 }
1273
1274 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
1275 let current_column = self.indent_column_for_line(row);
1276 if column > current_column {
1277 let offset = Point::new(row, 0).to_offset(&*self);
1278 self.edit(
1279 [offset..offset],
1280 " ".repeat((column - current_column) as usize),
1281 cx,
1282 );
1283 } else if column < current_column {
1284 self.edit(
1285 [Point::new(row, 0)..Point::new(row, current_column - column)],
1286 "",
1287 cx,
1288 );
1289 }
1290 }
1291
1292 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
1293 // TODO: it would be nice to not allocate here.
1294 let old_text = self.text();
1295 let base_version = self.version();
1296 cx.background().spawn(async move {
1297 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
1298 .iter_all_changes()
1299 .map(|c| (c.tag(), c.value().len()))
1300 .collect::<Vec<_>>();
1301 Diff {
1302 base_version,
1303 new_text,
1304 changes,
1305 }
1306 })
1307 }
1308
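    /// Applies a diff produced by [`Buffer::diff`], but only if the buffer has not been edited
    /// since the diff's base version; returns whether the diff was applied. This prevents a stale
    /// reload from clobbering edits made while the new contents were being loaded.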
1309 pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
1310 if self.version == diff.base_version {
1311 self.start_transaction();
1312 let mut offset = 0;
1313 for (tag, len) in diff.changes {
1314 let range = offset..(offset + len);
1315 match tag {
1316 ChangeTag::Equal => offset += len,
1317 ChangeTag::Delete => {
1318 self.edit(Some(range), "", cx);
1319 }
1320 ChangeTag::Insert => {
1321 self.edit(Some(offset..offset), &diff.new_text[range], cx);
1322 offset += len;
1323 }
1324 }
1325 }
1326 self.end_transaction(cx);
1327 true
1328 } else {
1329 false
1330 }
1331 }
1332
1333 pub fn is_dirty(&self) -> bool {
1334 !self.saved_version.observed_all(&self.version)
1335 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1336 }
1337
1338 pub fn has_conflict(&self) -> bool {
1339 !self.saved_version.observed_all(&self.version)
1340 && self
1341 .file
1342 .as_ref()
1343 .map_or(false, |file| file.mtime() > self.saved_mtime)
1344 }
1345
1346 pub fn subscribe(&mut self) -> Subscription {
1347 self.text.subscribe()
1348 }
1349
1350 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1351 self.start_transaction_at(Instant::now())
1352 }
1353
1354 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1355 self.text.start_transaction_at(now)
1356 }
1357
1358 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1359 self.end_transaction_at(Instant::now(), cx)
1360 }
1361
1362 pub fn end_transaction_at(
1363 &mut self,
1364 now: Instant,
1365 cx: &mut ModelContext<Self>,
1366 ) -> Option<TransactionId> {
1367 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1368 let was_dirty = start_version != self.saved_version;
1369 self.did_edit(&start_version, was_dirty, cx);
1370 Some(transaction_id)
1371 } else {
1372 None
1373 }
1374 }
1375
1376 pub fn avoid_grouping_next_transaction(&mut self) {
1377 self.text.avoid_grouping_next_transaction();
1378 }
1379
1380 pub fn set_active_selections(
1381 &mut self,
1382 selections: Arc<[Selection<Anchor>]>,
1383 cx: &mut ModelContext<Self>,
1384 ) {
1385 let lamport_timestamp = self.text.lamport_clock.tick();
1386 self.remote_selections.insert(
1387 self.text.replica_id(),
1388 SelectionSet {
1389 selections: selections.clone(),
1390 lamport_timestamp,
1391 },
1392 );
1393 self.send_operation(
1394 Operation::UpdateSelections {
1395 replica_id: self.text.replica_id(),
1396 selections,
1397 lamport_timestamp,
1398 },
1399 cx,
1400 );
1401 }
1402
1403 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1404 self.set_active_selections(Arc::from([]), cx);
1405 }
1406
1407 fn update_language_server(&mut self, cx: &AppContext) {
1408 let language_server = if let Some(language_server) = self.language_server.as_mut() {
1409 language_server
1410 } else {
1411 return;
1412 };
1413 let abs_path = self
1414 .file
1415 .as_ref()
1416 .and_then(|f| f.as_local())
1417 .map_or(Path::new("/").to_path_buf(), |file| file.abs_path(cx));
1418
1419 let version = post_inc(&mut language_server.next_version);
1420 let snapshot = LanguageServerSnapshot {
1421 buffer_snapshot: self.text.snapshot(),
1422 version,
1423 path: Arc::from(abs_path),
1424 };
1425 language_server
1426 .pending_snapshots
1427 .insert(version, snapshot.clone());
1428 let _ = language_server
1429 .latest_snapshot
1430 .blocking_send(Some(snapshot));
1431 }
1432
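    /// Replaces each of the given ranges with `new_text`, without requesting autoindentation.
    /// No-op edits are skipped and overlapping or adjacent ranges are coalesced. Returns the
    /// local timestamp of the resulting edit operation, if any edit was actually performed.
    ///
    /// A usage sketch (hedged: assumes a gpui `ModelHandle<Buffer>` in a test context):
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.edit(vec![0..5], "goodbye", cx);
    /// });
    /// ```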
1433 pub fn edit<I, S, T>(
1434 &mut self,
1435 ranges_iter: I,
1436 new_text: T,
1437 cx: &mut ModelContext<Self>,
1438 ) -> Option<clock::Local>
1439 where
1440 I: IntoIterator<Item = Range<S>>,
1441 S: ToOffset,
1442 T: Into<String>,
1443 {
1444 self.edit_internal(ranges_iter, new_text, false, cx)
1445 }
1446
1447 pub fn edit_with_autoindent<I, S, T>(
1448 &mut self,
1449 ranges_iter: I,
1450 new_text: T,
1451 cx: &mut ModelContext<Self>,
1452 ) -> Option<clock::Local>
1453 where
1454 I: IntoIterator<Item = Range<S>>,
1455 S: ToOffset,
1456 T: Into<String>,
1457 {
1458 self.edit_internal(ranges_iter, new_text, true, cx)
1459 }
1460
1461 pub fn edit_internal<I, S, T>(
1462 &mut self,
1463 ranges_iter: I,
1464 new_text: T,
1465 autoindent: bool,
1466 cx: &mut ModelContext<Self>,
1467 ) -> Option<clock::Local>
1468 where
1469 I: IntoIterator<Item = Range<S>>,
1470 S: ToOffset,
1471 T: Into<String>,
1472 {
1473 let new_text = new_text.into();
1474
        // Skip edits that would neither delete nor insert text, and coalesce overlapping or
        // adjacent ranges.
1476 let mut ranges: Vec<Range<usize>> = Vec::new();
1477 for range in ranges_iter {
1478 let range = range.start.to_offset(self)..range.end.to_offset(self);
1479 if !new_text.is_empty() || !range.is_empty() {
1480 if let Some(prev_range) = ranges.last_mut() {
1481 if prev_range.end >= range.start {
1482 prev_range.end = cmp::max(prev_range.end, range.end);
1483 } else {
1484 ranges.push(range);
1485 }
1486 } else {
1487 ranges.push(range);
1488 }
1489 }
1490 }
1491 if ranges.is_empty() {
1492 return None;
1493 }
1494
1495 self.start_transaction();
1496 self.pending_autoindent.take();
1497 let autoindent_request = if autoindent && self.language.is_some() {
1498 let before_edit = self.snapshot();
1499 let edited = ranges
1500 .iter()
1501 .filter_map(|range| {
1502 let start = range.start.to_point(self);
1503 if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
1504 None
1505 } else {
1506 Some(self.anchor_before(range.start))
1507 }
1508 })
1509 .collect();
1510 Some((before_edit, edited))
1511 } else {
1512 None
1513 };
1514
1515 let first_newline_ix = new_text.find('\n');
1516 let new_text_len = new_text.len();
1517
1518 let edit = self.text.edit(ranges.iter().cloned(), new_text);
1519 let edit_id = edit.timestamp.local();
1520
1521 if let Some((before_edit, edited)) = autoindent_request {
1522 let mut inserted = None;
1523 if let Some(first_newline_ix) = first_newline_ix {
1524 let mut delta = 0isize;
1525 inserted = Some(
1526 ranges
1527 .iter()
1528 .map(|range| {
1529 let start =
1530 (delta + range.start as isize) as usize + first_newline_ix + 1;
1531 let end = (delta + range.start as isize) as usize + new_text_len;
1532 delta +=
1533 (range.end as isize - range.start as isize) + new_text_len as isize;
1534 self.anchor_before(start)..self.anchor_after(end)
1535 })
1536 .collect(),
1537 );
1538 }
1539
1540 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1541 before_edit,
1542 edited,
1543 inserted,
1544 }));
1545 }
1546
1547 self.end_transaction(cx);
1548 self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
1549 Some(edit_id)
1550 }
1551
1552 fn apply_lsp_edits(
1553 &mut self,
1554 edits: Vec<lsp::TextEdit>,
1555 cx: &mut ModelContext<Self>,
1556 ) -> Result<Vec<clock::Local>> {
1557 for edit in &edits {
1558 let range = range_from_lsp(edit.range);
1559 if self.clip_point_utf16(range.start, Bias::Left) != range.start
1560 || self.clip_point_utf16(range.end, Bias::Left) != range.end
1561 {
1562 return Err(anyhow!(
1563 "invalid formatting edits received from language server"
1564 ));
1565 }
1566 }
1567
1568 self.start_transaction();
1569 let edit_ids = edits
1570 .into_iter()
1571 .rev()
1572 .filter_map(|edit| self.edit([range_from_lsp(edit.range)], edit.new_text, cx))
1573 .collect();
1574 self.end_transaction(cx);
1575 Ok(edit_ids)
1576 }
1577
1578 fn did_edit(
1579 &mut self,
1580 old_version: &clock::Global,
1581 was_dirty: bool,
1582 cx: &mut ModelContext<Self>,
1583 ) {
1584 if self.edits_since::<usize>(old_version).next().is_none() {
1585 return;
1586 }
1587
1588 self.reparse(cx);
1589 self.update_language_server(cx);
1590
1591 cx.emit(Event::Edited);
1592 if !was_dirty {
1593 cx.emit(Event::Dirtied);
1594 }
1595 cx.notify();
1596 }
1597
1598 fn grammar(&self) -> Option<&Arc<Grammar>> {
1599 self.language.as_ref().and_then(|l| l.grammar.as_ref())
1600 }
1601
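    /// Applies operations received from other replicas. Text operations are handed to the
    /// underlying text buffer; other operations are applied immediately if the anchors they
    /// reference can be resolved, and otherwise deferred and retried as more operations arrive.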
1602 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1603 &mut self,
1604 ops: I,
1605 cx: &mut ModelContext<Self>,
1606 ) -> Result<()> {
1607 self.pending_autoindent.take();
1608 let was_dirty = self.is_dirty();
1609 let old_version = self.version.clone();
1610 let mut deferred_ops = Vec::new();
1611 let buffer_ops = ops
1612 .into_iter()
1613 .filter_map(|op| match op {
1614 Operation::Buffer(op) => Some(op),
1615 _ => {
1616 if self.can_apply_op(&op) {
1617 self.apply_op(op, cx);
1618 } else {
1619 deferred_ops.push(op);
1620 }
1621 None
1622 }
1623 })
1624 .collect::<Vec<_>>();
1625 self.text.apply_ops(buffer_ops)?;
1626 self.deferred_ops.insert(deferred_ops);
1627 self.flush_deferred_ops(cx);
1628 self.did_edit(&old_version, was_dirty, cx);
1629 // Notify independently of whether the buffer was edited as the operations could include a
1630 // selection update.
1631 cx.notify();
1632 Ok(())
1633 }
1634
1635 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1636 let mut deferred_ops = Vec::new();
1637 for op in self.deferred_ops.drain().iter().cloned() {
1638 if self.can_apply_op(&op) {
1639 self.apply_op(op, cx);
1640 } else {
1641 deferred_ops.push(op);
1642 }
1643 }
1644 self.deferred_ops.insert(deferred_ops);
1645 }
1646
1647 fn can_apply_op(&self, operation: &Operation) -> bool {
1648 match operation {
1649 Operation::Buffer(_) => {
1650 unreachable!("buffer operations should never be applied at this layer")
1651 }
1652 Operation::UpdateDiagnostics {
1653 diagnostics: diagnostic_set,
1654 ..
1655 } => diagnostic_set.iter().all(|diagnostic| {
1656 self.text.can_resolve(&diagnostic.range.start)
1657 && self.text.can_resolve(&diagnostic.range.end)
1658 }),
1659 Operation::UpdateSelections { selections, .. } => selections
1660 .iter()
1661 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1662 Operation::UpdateCompletionTriggers { .. } => true,
1663 }
1664 }
1665
1666 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1667 match operation {
1668 Operation::Buffer(_) => {
1669 unreachable!("buffer operations should never be applied at this layer")
1670 }
1671 Operation::UpdateDiagnostics {
1672 diagnostics: diagnostic_set,
1673 ..
1674 } => {
1675 let snapshot = self.snapshot();
1676 self.apply_diagnostic_update(
1677 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1678 cx,
1679 );
1680 }
1681 Operation::UpdateSelections {
1682 replica_id,
1683 selections,
1684 lamport_timestamp,
1685 } => {
1686 if let Some(set) = self.remote_selections.get(&replica_id) {
1687 if set.lamport_timestamp > lamport_timestamp {
1688 return;
1689 }
1690 }
1691
1692 self.remote_selections.insert(
1693 replica_id,
1694 SelectionSet {
1695 selections,
1696 lamport_timestamp,
1697 },
1698 );
1699 self.text.lamport_clock.observe(lamport_timestamp);
1700 self.selections_update_count += 1;
1701 }
1702 Operation::UpdateCompletionTriggers { triggers } => {
1703 self.completion_triggers = triggers;
1704 }
1705 }
1706 }
1707
1708 fn apply_diagnostic_update(&mut self, diagnostics: DiagnosticSet, cx: &mut ModelContext<Self>) {
1709 self.diagnostics = diagnostics;
1710 self.diagnostics_update_count += 1;
1711 cx.notify();
1712 cx.emit(Event::DiagnosticsUpdated);
1713 }
1714
1715 #[cfg(not(test))]
1716 pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1717 if let Some(file) = &self.file {
1718 file.buffer_updated(self.remote_id(), operation, cx.as_mut());
1719 }
1720 }
1721
1722 #[cfg(test)]
1723 pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
1724 self.operations.push(operation);
1725 }
1726
1727 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1728 self.remote_selections.remove(&replica_id);
1729 cx.notify();
1730 }
1731
1732 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1733 let was_dirty = self.is_dirty();
1734 let old_version = self.version.clone();
1735
1736 if let Some((transaction_id, operation)) = self.text.undo() {
1737 self.send_operation(Operation::Buffer(operation), cx);
1738 self.did_edit(&old_version, was_dirty, cx);
1739 Some(transaction_id)
1740 } else {
1741 None
1742 }
1743 }
1744
1745 pub fn undo_transaction(
1746 &mut self,
1747 transaction_id: TransactionId,
1748 cx: &mut ModelContext<Self>,
1749 ) -> bool {
1750 let was_dirty = self.is_dirty();
1751 let old_version = self.version.clone();
1752
1753 if let Some(operation) = self.text.undo_transaction(transaction_id) {
1754 self.send_operation(Operation::Buffer(operation), cx);
1755 self.did_edit(&old_version, was_dirty, cx);
1756 true
1757 } else {
1758 false
1759 }
1760 }
1761
1762 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1763 let was_dirty = self.is_dirty();
1764 let old_version = self.version.clone();
1765
1766 if let Some((transaction_id, operation)) = self.text.redo() {
1767 self.send_operation(Operation::Buffer(operation), cx);
1768 self.did_edit(&old_version, was_dirty, cx);
1769 Some(transaction_id)
1770 } else {
1771 None
1772 }
1773 }
1774
1775 pub fn redo_transaction(
1776 &mut self,
1777 transaction_id: TransactionId,
1778 cx: &mut ModelContext<Self>,
1779 ) -> bool {
1780 let was_dirty = self.is_dirty();
1781 let old_version = self.version.clone();
1782
1783 if let Some(operation) = self.text.redo_transaction(transaction_id) {
1784 self.send_operation(Operation::Buffer(operation), cx);
1785 self.did_edit(&old_version, was_dirty, cx);
1786 true
1787 } else {
1788 false
1789 }
1790 }
1791
1792 pub fn completions<T>(
1793 &self,
1794 position: T,
1795 cx: &mut ModelContext<Self>,
1796 ) -> Task<Result<Vec<Completion<Anchor>>>>
1797 where
1798 T: ToOffset,
1799 {
1800 let file = if let Some(file) = self.file.as_ref() {
1801 file
1802 } else {
1803 return Task::ready(Ok(Default::default()));
1804 };
1805 let language = self.language.clone();
1806
1807 if let Some(file) = file.as_local() {
1808 let server = if let Some(language_server) = self.language_server.as_ref() {
1809 language_server.server.clone()
1810 } else {
1811 return Task::ready(Ok(Default::default()));
1812 };
1813 let abs_path = file.abs_path(cx);
1814 let position = self.offset_to_point_utf16(position.to_offset(self));
1815
1816 cx.spawn(|this, cx| async move {
1817 let completions = server
1818 .request::<lsp::request::Completion>(lsp::CompletionParams {
1819 text_document_position: lsp::TextDocumentPositionParams::new(
1820 lsp::TextDocumentIdentifier::new(
1821 lsp::Url::from_file_path(abs_path).unwrap(),
1822 ),
1823 position.to_lsp_position(),
1824 ),
1825 context: Default::default(),
1826 work_done_progress_params: Default::default(),
1827 partial_result_params: Default::default(),
1828 })
1829 .await?;
1830
1831 let completions = if let Some(completions) = completions {
1832 match completions {
1833 lsp::CompletionResponse::Array(completions) => completions,
1834 lsp::CompletionResponse::List(list) => list.items,
1835 }
1836 } else {
1837 Default::default()
1838 };
1839
1840 this.read_with(&cx, |this, _| {
1841 Ok(completions.into_iter().filter_map(|lsp_completion| {
1842 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1843 lsp::CompletionTextEdit::Edit(edit) => (range_from_lsp(edit.range), edit.new_text.clone()),
1844 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1845 log::info!("received an insert and replace completion but we don't yet support that");
1846 return None
1847 },
1848 };
1849
1850 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                    let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1852 if clipped_start == old_range.start && clipped_end == old_range.end {
1853 Some(Completion {
1854 old_range: this.anchor_before(old_range.start)..this.anchor_after(old_range.end),
1855 new_text,
1856 label: language.as_ref().and_then(|l| l.label_for_completion(&lsp_completion)),
1857 lsp_completion,
1858 })
1859 } else {
1860 None
1861 }
1862 }).collect())
1863 })
1864 })
1865 } else {
1866 file.completions(
1867 self.remote_id(),
1868 self.anchor_before(position),
1869 language,
1870 cx.as_mut(),
1871 )
1872 }
1873 }
1874
1875 pub fn apply_additional_edits_for_completion(
1876 &mut self,
1877 completion: Completion<Anchor>,
1878 push_to_history: bool,
1879 cx: &mut ModelContext<Self>,
1880 ) -> Task<Result<Vec<clock::Local>>> {
1881 let file = if let Some(file) = self.file.as_ref() {
1882 file
1883 } else {
1884 return Task::ready(Ok(Default::default()));
1885 };
1886
1887 if file.is_local() {
1888 let server = if let Some(lang) = self.language_server.as_ref() {
1889 lang.server.clone()
1890 } else {
1891 return Task::ready(Ok(Default::default()));
1892 };
1893
1894 cx.spawn(|this, mut cx| async move {
1895 let resolved_completion = server
1896 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1897 .await?;
1898 if let Some(additional_edits) = resolved_completion.additional_text_edits {
1899 this.update(&mut cx, |this, cx| {
1900 if !push_to_history {
1901 this.avoid_grouping_next_transaction();
1902 }
1903 this.start_transaction();
1904 let edit_ids = this.apply_lsp_edits(additional_edits, cx);
1905 if let Some(transaction_id) = this.end_transaction(cx) {
1906 if !push_to_history {
1907 this.text.forget_transaction(transaction_id);
1908 }
1909 }
1910 edit_ids
1911 })
1912 } else {
1913 Ok(Default::default())
1914 }
1915 })
1916 } else {
1917 let apply_edits = file.apply_additional_edits_for_completion(
1918 self.remote_id(),
1919 completion,
1920 cx.as_mut(),
1921 );
1922 cx.spawn(|this, mut cx| async move {
1923 let edit_ids = apply_edits.await?;
1924 this.update(&mut cx, |this, _| this.text.wait_for_edits(&edit_ids))
1925 .await;
1926 if push_to_history {
1927 this.update(&mut cx, |this, _| {
1928 this.text
1929 .push_transaction(edit_ids.iter().copied(), Instant::now());
1930 });
1931 }
1932 Ok(edit_ids)
1933 })
1934 }
1935 }
1936
1937 pub fn completion_triggers(&self) -> &[String] {
1938 &self.completion_triggers
1939 }
1940}
1941
1942#[cfg(any(test, feature = "test-support"))]
1943impl Buffer {
1944 pub fn set_group_interval(&mut self, group_interval: Duration) {
1945 self.text.set_group_interval(group_interval);
1946 }
1947
1948 pub fn randomly_edit<T>(
1949 &mut self,
1950 rng: &mut T,
1951 old_range_count: usize,
1952 cx: &mut ModelContext<Self>,
1953 ) where
1954 T: rand::Rng,
1955 {
1956 let mut old_ranges: Vec<Range<usize>> = Vec::new();
1957 for _ in 0..old_range_count {
1958 let last_end = old_ranges.last().map_or(0, |last_range| last_range.end + 1);
1959 if last_end > self.len() {
1960 break;
1961 }
1962 old_ranges.push(self.text.random_byte_range(last_end, rng));
1963 }
1964 let new_text_len = rng.gen_range(0..10);
1965 let new_text: String = crate::random_char_iter::RandomCharIter::new(&mut *rng)
1966 .take(new_text_len)
1967 .collect();
1968 log::info!(
1969 "mutating buffer {} at {:?}: {:?}",
1970 self.replica_id(),
1971 old_ranges,
1972 new_text
1973 );
1974 self.edit(old_ranges.iter().cloned(), new_text.as_str(), cx);
1975 }
1976
1977 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1978 let was_dirty = self.is_dirty();
1979 let old_version = self.version.clone();
1980
1981 let ops = self.text.randomly_undo_redo(rng);
1982 if !ops.is_empty() {
1983 for op in ops {
1984 self.send_operation(Operation::Buffer(op), cx);
1985 self.did_edit(&old_version, was_dirty, cx);
1986 }
1987 }
1988 }
1989}
1990
1991impl Entity for Buffer {
1992 type Event = Event;
1993
1994 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
1995 if let Some(file) = self.file.as_ref() {
1996 file.buffer_removed(self.remote_id(), cx);
1997 }
1998 }
1999}
2000
2001impl Deref for Buffer {
2002 type Target = TextBuffer;
2003
2004 fn deref(&self) -> &Self::Target {
2005 &self.text
2006 }
2007}
2008
2009impl BufferSnapshot {
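    /// Uses the grammar's indentation query to find multi-line "indentation ranges" around the
    /// given rows, then yields one suggestion per row: a basis row to copy indentation from and
    /// whether to add a single additional indent level.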
2010 fn suggest_autoindents<'a>(
2011 &'a self,
2012 row_range: Range<u32>,
2013 ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
2014 let mut query_cursor = QueryCursorHandle::new();
2015 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
2016 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2017
2018 // Get the "indentation ranges" that intersect this row range.
2019 let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
2020 let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
2021 query_cursor.set_point_range(
2022 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
2023 ..Point::new(row_range.end, 0).to_ts_point(),
2024 );
2025 let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
2026 for mat in query_cursor.matches(
2027 &grammar.indents_query,
2028 tree.root_node(),
2029 TextProvider(self.as_rope()),
2030 ) {
2031 let mut node_kind = "";
2032 let mut start: Option<Point> = None;
2033 let mut end: Option<Point> = None;
2034 for capture in mat.captures {
2035 if Some(capture.index) == indent_capture_ix {
2036 node_kind = capture.node.kind();
2037 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2038 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2039 } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position()));
2041 }
2042 }
2043
2044 if let Some((start, end)) = start.zip(end) {
2045 if start.row == end.row {
2046 continue;
2047 }
2048
2049 let range = start..end;
2050 match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
2051 Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
2052 Ok(ix) => {
2053 let prev_range = &mut indentation_ranges[ix];
2054 prev_range.0.end = prev_range.0.end.max(range.end);
2055 }
2056 }
2057 }
2058 }
2059
2060 let mut prev_row = prev_non_blank_row.unwrap_or(0);
2061 Some(row_range.map(move |row| {
2062 let row_start = Point::new(row, self.indent_column_for_line(row));
2063
2064 let mut indent_from_prev_row = false;
2065 let mut outdent_to_row = u32::MAX;
2066 for (range, _node_kind) in &indentation_ranges {
2067 if range.start.row >= row {
2068 break;
2069 }
2070
2071 if range.start.row == prev_row && range.end > row_start {
2072 indent_from_prev_row = true;
2073 }
2074 if range.end.row >= prev_row && range.end <= row_start {
2075 outdent_to_row = outdent_to_row.min(range.start.row);
2076 }
2077 }
2078
2079 let suggestion = if outdent_to_row == prev_row {
2080 IndentSuggestion {
2081 basis_row: prev_row,
2082 indent: false,
2083 }
2084 } else if indent_from_prev_row {
2085 IndentSuggestion {
2086 basis_row: prev_row,
2087 indent: true,
2088 }
2089 } else if outdent_to_row < prev_row {
2090 IndentSuggestion {
2091 basis_row: outdent_to_row,
2092 indent: false,
2093 }
2094 } else {
2095 IndentSuggestion {
2096 basis_row: prev_row,
2097 indent: false,
2098 }
2099 };
2100
2101 prev_row = row;
2102 suggestion
2103 }))
2104 } else {
2105 None
2106 }
2107 }
2108
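    /// Returns the closest non-blank row above `row`, if one exists.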
2109 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2110 while row > 0 {
2111 row -= 1;
2112 if !self.is_line_blank(row) {
2113 return Some(row);
2114 }
2115 }
2116 None
2117 }
2118
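    /// Returns an iterator over the text in `range`, split into chunks that
    /// carry their syntax highlight id and the most severe diagnostic
    /// overlapping them.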
2119 pub fn chunks<'a, T: ToOffset>(&'a self, range: Range<T>) -> BufferChunks<'a> {
2120 let range = range.start.to_offset(self)..range.end.to_offset(self);
2121
2122 let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
2123 for entry in self.diagnostics_in_range::<_, usize>(range.clone()) {
2124 diagnostic_endpoints.push(DiagnosticEndpoint {
2125 offset: entry.range.start,
2126 is_start: true,
2127 severity: entry.diagnostic.severity,
2128 });
2129 diagnostic_endpoints.push(DiagnosticEndpoint {
2130 offset: entry.range.end,
2131 is_start: false,
2132 severity: entry.diagnostic.severity,
2133 });
2134 }
2135 diagnostic_endpoints.sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2136
2137 BufferChunks::new(
2138 self.text.as_rope(),
2139 range,
2140 self.tree.as_ref(),
2141 self.grammar(),
2142 diagnostic_endpoints,
2143 )
2144 }
2145
2146 pub fn language(&self) -> Option<&Arc<Language>> {
2147 self.language.as_ref()
2148 }
2149
2150 fn grammar(&self) -> Option<&Arc<Grammar>> {
2151 self.language
2152 .as_ref()
2153 .and_then(|language| language.grammar.as_ref())
2154 }
2155
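    /// Returns the byte range of the smallest syntax node that contains `range`
    /// but is not identical to it, if a syntax tree is available.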
2156 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2157 if let Some(tree) = self.tree.as_ref() {
2158 let root = tree.root_node();
2159 let range = range.start.to_offset(self)..range.end.to_offset(self);
2160 let mut node = root.descendant_for_byte_range(range.start, range.end);
2161 while node.map_or(false, |n| n.byte_range() == range) {
2162 node = node.unwrap().parent();
2163 }
2164 node.map(|n| n.byte_range())
2165 } else {
2166 None
2167 }
2168 }
2169
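    /// Builds an outline from the grammar's outline query, using its "item",
    /// "name", and optional "context" captures. Returns `None` when there is no
    /// syntax tree, the query lacks the required captures, or nothing matches.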
2170 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2171 let tree = self.tree.as_ref()?;
2172 let grammar = self
2173 .language
2174 .as_ref()
2175 .and_then(|language| language.grammar.as_ref())?;
2176
2177 let mut cursor = QueryCursorHandle::new();
2178 let matches = cursor.matches(
2179 &grammar.outline_query,
2180 tree.root_node(),
2181 TextProvider(self.as_rope()),
2182 );
2183
2184 let mut chunks = self.chunks(0..self.len());
2185
2186 let item_capture_ix = grammar.outline_query.capture_index_for_name("item")?;
2187 let name_capture_ix = grammar.outline_query.capture_index_for_name("name")?;
2188 let context_capture_ix = grammar
2189 .outline_query
2190 .capture_index_for_name("context")
2191 .unwrap_or(u32::MAX);
2192
2193 let mut stack = Vec::<Range<usize>>::new();
2194 let items = matches
2195 .filter_map(|mat| {
2196 let item_node = mat.nodes_for_capture_index(item_capture_ix).next()?;
2197 let range = item_node.start_byte()..item_node.end_byte();
2198 let mut text = String::new();
2199 let mut name_ranges = Vec::new();
2200 let mut highlight_ranges = Vec::new();
2201
2202 for capture in mat.captures {
2203 let node_is_name;
2204 if capture.index == name_capture_ix {
2205 node_is_name = true;
2206 } else if capture.index == context_capture_ix {
2207 node_is_name = false;
2208 } else {
2209 continue;
2210 }
2211
2212 let range = capture.node.start_byte()..capture.node.end_byte();
2213 if !text.is_empty() {
2214 text.push(' ');
2215 }
2216 if node_is_name {
2217 let mut start = text.len();
2218 let end = start + range.len();
2219
                        // When multiple names are captured, the matchable text
                        // includes the whitespace between the names.
2222 if !name_ranges.is_empty() {
2223 start -= 1;
2224 }
2225
2226 name_ranges.push(start..end);
2227 }
2228
2229 let mut offset = range.start;
2230 chunks.seek(offset);
2231 while let Some(mut chunk) = chunks.next() {
2232 if chunk.text.len() > range.end - offset {
2233 chunk.text = &chunk.text[0..(range.end - offset)];
2234 offset = range.end;
2235 } else {
2236 offset += chunk.text.len();
2237 }
2238 let style = chunk
2239 .highlight_id
2240 .zip(theme)
2241 .and_then(|(highlight, theme)| highlight.style(theme));
2242 if let Some(style) = style {
2243 let start = text.len();
2244 let end = start + chunk.text.len();
2245 highlight_ranges.push((start..end, style));
2246 }
2247 text.push_str(chunk.text);
2248 if offset >= range.end {
2249 break;
2250 }
2251 }
2252 }
2253
2254 while stack.last().map_or(false, |prev_range| {
2255 !prev_range.contains(&range.start) || !prev_range.contains(&range.end)
2256 }) {
2257 stack.pop();
2258 }
2259 stack.push(range.clone());
2260
2261 Some(OutlineItem {
2262 depth: stack.len() - 1,
2263 range: self.anchor_after(range.start)..self.anchor_before(range.end),
2264 text,
2265 highlight_ranges,
2266 name_ranges,
2267 })
2268 })
2269 .collect::<Vec<_>>();
2270
2271 if items.is_empty() {
2272 None
2273 } else {
2274 Some(Outline::new(items))
2275 }
2276 }
2277
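    /// Returns the byte ranges of the open and close brackets of the innermost
    /// bracket pair enclosing `range`, as reported by the grammar's bracket
    /// query.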
2278 pub fn enclosing_bracket_ranges<T: ToOffset>(
2279 &self,
2280 range: Range<T>,
2281 ) -> Option<(Range<usize>, Range<usize>)> {
2282 let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
2283 let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
2284 let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
2285
2286 // Find bracket pairs that *inclusively* contain the given range.
2287 let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
2288 let mut cursor = QueryCursorHandle::new();
2289 let matches = cursor.set_byte_range(range).matches(
2290 &grammar.brackets_query,
2291 tree.root_node(),
2292 TextProvider(self.as_rope()),
2293 );
2294
2295 // Get the ranges of the innermost pair of brackets.
2296 matches
2297 .filter_map(|mat| {
2298 let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
2299 let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
2300 Some((open.byte_range(), close.byte_range()))
2301 })
2302 .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
2303 }
2304
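    /// For each remote replica with a non-empty selection set, returns an
    /// iterator over the selections that intersect `range`.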
2311 pub fn remote_selections_in_range<'a>(
2312 &'a self,
2313 range: Range<Anchor>,
2314 ) -> impl 'a + Iterator<Item = (ReplicaId, impl 'a + Iterator<Item = &'a Selection<Anchor>>)>
2315 {
2316 self.remote_selections
2317 .iter()
2318 .filter(|(replica_id, set)| {
2319 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2320 })
2321 .map(move |(replica_id, set)| {
2322 let start_ix = match set.selections.binary_search_by(|probe| {
2323 probe
2324 .end
2325 .cmp(&range.start, self)
2326 .unwrap()
2327 .then(Ordering::Greater)
2328 }) {
2329 Ok(ix) | Err(ix) => ix,
2330 };
2331 let end_ix = match set.selections.binary_search_by(|probe| {
2332 probe
2333 .start
2334 .cmp(&range.end, self)
2335 .unwrap()
2336 .then(Ordering::Less)
2337 }) {
2338 Ok(ix) | Err(ix) => ix,
2339 };
2340
2341 (*replica_id, set.selections[start_ix..end_ix].iter())
2342 })
2343 }
2344
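    /// Returns the diagnostic entries whose ranges intersect `search_range`,
    /// with their positions resolved to type `O`.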
2345 pub fn diagnostics_in_range<'a, T, O>(
2346 &'a self,
2347 search_range: Range<T>,
2348 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2349 where
2350 T: 'a + Clone + ToOffset,
2351 O: 'a + FromAnchor,
2352 {
2353 self.diagnostics.range(search_range.clone(), self, true)
2354 }
2355
2356 pub fn diagnostic_groups(&self) -> Vec<DiagnosticGroup<Anchor>> {
2357 let mut groups = Vec::new();
2358 self.diagnostics.groups(&mut groups, self);
2359 groups
2360 }
2361
2362 pub fn diagnostic_group<'a, O>(
2363 &'a self,
2364 group_id: usize,
2365 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2366 where
2367 O: 'a + FromAnchor,
2368 {
2369 self.diagnostics.group(group_id, self)
2370 }
2371
2372 pub fn diagnostics_update_count(&self) -> usize {
2373 self.diagnostics_update_count
2374 }
2375
2376 pub fn parse_count(&self) -> usize {
2377 self.parse_count
2378 }
2379
2380 pub fn selections_update_count(&self) -> usize {
2381 self.selections_update_count
2382 }
2383}
2384
2385impl Clone for BufferSnapshot {
2386 fn clone(&self) -> Self {
2387 Self {
2388 text: self.text.clone(),
2389 tree: self.tree.clone(),
2390 remote_selections: self.remote_selections.clone(),
2391 diagnostics: self.diagnostics.clone(),
2392 selections_update_count: self.selections_update_count,
2393 diagnostics_update_count: self.diagnostics_update_count,
2394 is_parsing: self.is_parsing,
2395 language: self.language.clone(),
2396 parse_count: self.parse_count,
2397 }
2398 }
2399}
2400
2401impl Deref for BufferSnapshot {
2402 type Target = text::BufferSnapshot;
2403
2404 fn deref(&self) -> &Self::Target {
2405 &self.text
2406 }
2407}
2408
2409impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
2410 type I = ByteChunks<'a>;
2411
2412 fn text(&mut self, node: tree_sitter::Node) -> Self::I {
2413 ByteChunks(self.0.chunks_in_range(node.byte_range()))
2414 }
2415}
2416
2417struct ByteChunks<'a>(rope::Chunks<'a>);
2418
2419impl<'a> Iterator for ByteChunks<'a> {
2420 type Item = &'a [u8];
2421
2422 fn next(&mut self) -> Option<Self::Item> {
2423 self.0.next().map(str::as_bytes)
2424 }
2425}
2426
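// SAFETY (assumed): the tree-sitter captures held by `BufferChunks` borrow only
// from the `QueryCursorHandle` that the struct itself owns (see the transmute
// in `BufferChunks::new`), so moving the whole value to another thread is
// presumed not to invalidate them.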
2427unsafe impl<'a> Send for BufferChunks<'a> {}
2428
2429impl<'a> BufferChunks<'a> {
2430 pub(crate) fn new(
2431 text: &'a Rope,
2432 range: Range<usize>,
2433 tree: Option<&'a Tree>,
2434 grammar: Option<&'a Arc<Grammar>>,
2435 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
2436 ) -> Self {
2437 let mut highlights = None;
2438 if let Some((grammar, tree)) = grammar.zip(tree) {
2439 let mut query_cursor = QueryCursorHandle::new();
2440
2441 // TODO - add a Tree-sitter API to remove the need for this.
2442 let cursor = unsafe {
2443 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
2444 };
2445 let captures = cursor.set_byte_range(range.clone()).captures(
2446 &grammar.highlights_query,
2447 tree.root_node(),
2448 TextProvider(text),
2449 );
2450 highlights = Some(BufferChunkHighlights {
2451 captures,
2452 next_capture: None,
2453 stack: Default::default(),
2454 highlight_map: grammar.highlight_map(),
2455 _query_cursor: query_cursor,
2456 })
2457 }
2458
2459 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2460 let chunks = text.chunks_in_range(range.clone());
2461
2462 BufferChunks {
2463 range,
2464 chunks,
2465 diagnostic_endpoints,
2466 error_depth: 0,
2467 warning_depth: 0,
2468 information_depth: 0,
2469 hint_depth: 0,
2470 highlights,
2471 }
2472 }
2473
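    /// Repositions this iterator at `offset`, dropping any highlight scopes
    /// that end at or before it and narrowing the pending capture query to the
    /// remaining range.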
2474 pub fn seek(&mut self, offset: usize) {
2475 self.range.start = offset;
2476 self.chunks.seek(self.range.start);
2477 if let Some(highlights) = self.highlights.as_mut() {
2478 highlights
2479 .stack
2480 .retain(|(end_offset, _)| *end_offset > offset);
2481 if let Some((mat, capture_ix)) = &highlights.next_capture {
2482 let capture = mat.captures[*capture_ix as usize];
2483 if offset >= capture.node.start_byte() {
2484 let next_capture_end = capture.node.end_byte();
2485 if offset < next_capture_end {
2486 highlights.stack.push((
2487 next_capture_end,
2488 highlights.highlight_map.get(capture.index),
2489 ));
2490 }
2491 highlights.next_capture.take();
2492 }
2493 }
2494 highlights.captures.set_byte_range(self.range.clone());
2495 }
2496 }
2497
2498 pub fn offset(&self) -> usize {
2499 self.range.start
2500 }
2501
2502 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2503 let depth = match endpoint.severity {
2504 DiagnosticSeverity::ERROR => &mut self.error_depth,
2505 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2506 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2507 DiagnosticSeverity::HINT => &mut self.hint_depth,
2508 _ => return,
2509 };
2510 if endpoint.is_start {
2511 *depth += 1;
2512 } else {
2513 *depth -= 1;
2514 }
2515 }
2516
2517 fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
2518 if self.error_depth > 0 {
2519 Some(DiagnosticSeverity::ERROR)
2520 } else if self.warning_depth > 0 {
2521 Some(DiagnosticSeverity::WARNING)
2522 } else if self.information_depth > 0 {
2523 Some(DiagnosticSeverity::INFORMATION)
2524 } else if self.hint_depth > 0 {
2525 Some(DiagnosticSeverity::HINT)
2526 } else {
2527 None
2528 }
2529 }
2530}
2531
2532impl<'a> Iterator for BufferChunks<'a> {
2533 type Item = Chunk<'a>;
2534
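    // Emits the next chunk of text, splitting it at whichever comes first: the
    // end of the current rope chunk, the next highlight capture boundary, or
    // the next diagnostic endpoint.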
2535 fn next(&mut self) -> Option<Self::Item> {
2536 let mut next_capture_start = usize::MAX;
2537 let mut next_diagnostic_endpoint = usize::MAX;
2538
2539 if let Some(highlights) = self.highlights.as_mut() {
2540 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2541 if *parent_capture_end <= self.range.start {
2542 highlights.stack.pop();
2543 } else {
2544 break;
2545 }
2546 }
2547
2548 if highlights.next_capture.is_none() {
2549 highlights.next_capture = highlights.captures.next();
2550 }
2551
2552 while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
2553 let capture = mat.captures[*capture_ix as usize];
2554 if self.range.start < capture.node.start_byte() {
2555 next_capture_start = capture.node.start_byte();
2556 break;
2557 } else {
2558 let highlight_id = highlights.highlight_map.get(capture.index);
2559 highlights
2560 .stack
2561 .push((capture.node.end_byte(), highlight_id));
2562 highlights.next_capture = highlights.captures.next();
2563 }
2564 }
2565 }
2566
2567 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
2568 if endpoint.offset <= self.range.start {
2569 self.update_diagnostic_depths(endpoint);
2570 self.diagnostic_endpoints.next();
2571 } else {
2572 next_diagnostic_endpoint = endpoint.offset;
2573 break;
2574 }
2575 }
2576
2577 if let Some(chunk) = self.chunks.peek() {
2578 let chunk_start = self.range.start;
2579 let mut chunk_end = (self.chunks.offset() + chunk.len())
2580 .min(next_capture_start)
2581 .min(next_diagnostic_endpoint);
2582 let mut highlight_id = None;
2583 if let Some(highlights) = self.highlights.as_ref() {
2584 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
2585 chunk_end = chunk_end.min(*parent_capture_end);
2586 highlight_id = Some(*parent_highlight_id);
2587 }
2588 }
2589
2590 let slice =
2591 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2592 self.range.start = chunk_end;
2593 if self.range.start == self.chunks.offset() + chunk.len() {
2594 self.chunks.next().unwrap();
2595 }
2596
2597 Some(Chunk {
2598 text: slice,
2599 highlight_id,
2600 diagnostic: self.current_diagnostic_severity(),
2601 })
2602 } else {
2603 None
2604 }
2605 }
2606}
2607
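// A `QueryCursorHandle` checks a cursor out of the global `QUERY_CURSORS` pool
// (creating one if the pool is empty) and returns it on drop with its byte and
// point ranges reset, so tree-sitter cursors are reused across queries.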
2608impl QueryCursorHandle {
2609 pub(crate) fn new() -> Self {
2610 QueryCursorHandle(Some(
2611 QUERY_CURSORS
2612 .lock()
2613 .pop()
                .unwrap_or_else(QueryCursor::new),
2615 ))
2616 }
2617}
2618
2619impl Deref for QueryCursorHandle {
2620 type Target = QueryCursor;
2621
2622 fn deref(&self) -> &Self::Target {
2623 self.0.as_ref().unwrap()
2624 }
2625}
2626
2627impl DerefMut for QueryCursorHandle {
2628 fn deref_mut(&mut self) -> &mut Self::Target {
2629 self.0.as_mut().unwrap()
2630 }
2631}
2632
2633impl Drop for QueryCursorHandle {
2634 fn drop(&mut self) {
2635 let mut cursor = self.0.take().unwrap();
2636 cursor.set_byte_range(0..usize::MAX);
2637 cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
2638 QUERY_CURSORS.lock().push(cursor)
2639 }
2640}
2641
2642trait ToTreeSitterPoint {
2643 fn to_ts_point(self) -> tree_sitter::Point;
2644 fn from_ts_point(point: tree_sitter::Point) -> Self;
2645}
2646
2647impl ToTreeSitterPoint for Point {
2648 fn to_ts_point(self) -> tree_sitter::Point {
2649 tree_sitter::Point::new(self.row as usize, self.column as usize)
2650 }
2651
2652 fn from_ts_point(point: tree_sitter::Point) -> Self {
2653 Point::new(point.row as u32, point.column as u32)
2654 }
2655}
2656
2657impl operation_queue::Operation for Operation {
2658 fn lamport_timestamp(&self) -> clock::Lamport {
2659 match self {
2660 Operation::Buffer(_) => {
2661 unreachable!("buffer operations should never be deferred at this layer")
2662 }
2663 Operation::UpdateDiagnostics {
2664 lamport_timestamp, ..
2665 }
2666 | Operation::UpdateSelections {
2667 lamport_timestamp, ..
2668 } => *lamport_timestamp,
2669 Operation::UpdateCompletionTriggers { .. } => {
2670 unreachable!("updating completion triggers should never be deferred")
2671 }
2672 }
2673 }
2674}
2675
2676impl Default for Diagnostic {
2677 fn default() -> Self {
2678 Self {
2679 code: Default::default(),
2680 severity: DiagnosticSeverity::ERROR,
2681 message: Default::default(),
2682 group_id: Default::default(),
2683 is_primary: Default::default(),
2684 is_valid: true,
2685 is_disk_based: false,
2686 }
2687 }
2688}
2689
2690impl<T> Completion<T> {
2691 pub fn label(&self) -> &str {
2692 self.label.as_deref().unwrap_or(&self.lsp_completion.label)
2693 }
2694
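    /// The range within `label()` that should be used when filtering this
    /// completion. If the LSP `filter_text` occurs in the label (e.g. a label
    /// of "foo: Bar" with a filter text of "foo" gives `0..3`), that occurrence
    /// is used; otherwise the whole label is matched.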
2695 pub fn filter_range(&self) -> Range<usize> {
2696 if let Some(filter_text) = self.lsp_completion.filter_text.as_deref() {
2697 if let Some(start) = self.label().find(filter_text) {
2698 start..start + filter_text.len()
2699 } else {
2700 0..self.label().len()
2701 }
2702 } else {
2703 0..self.label().len()
2704 }
2705 }
2706
2707 pub fn sort_key(&self) -> (usize, &str) {
2708 let kind_key = match self.lsp_completion.kind {
2709 Some(lsp::CompletionItemKind::VARIABLE) => 0,
2710 _ => 1,
2711 };
2712 (kind_key, &self.label()[self.filter_range()])
2713 }
2714
2715 pub fn is_snippet(&self) -> bool {
2716 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
2717 }
2718}
2719
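/// Groups the values produced by `values` into contiguous half-open ranges,
/// starting a new range whenever the next value does not extend the current
/// one or the current range has already reached `max_len` values.
///
/// Illustrative sketch (marked `ignore` rather than run as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1u32, 2, 3, 5].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..6]);
/// ```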
2720pub fn contiguous_ranges(
    values: impl IntoIterator<Item = u32>,
2722 max_len: usize,
2723) -> impl Iterator<Item = Range<u32>> {
2724 let mut values = values.into_iter();
2725 let mut current_range: Option<Range<u32>> = None;
2726 std::iter::from_fn(move || loop {
2727 if let Some(value) = values.next() {
2728 if let Some(range) = &mut current_range {
2729 if value == range.end && range.len() < max_len {
2730 range.end += 1;
2731 continue;
2732 }
2733 }
2734
2735 let prev_range = current_range.clone();
2736 current_range = Some(value..(value + 1));
2737 if prev_range.is_some() {
2738 return prev_range;
2739 }
2740 } else {
2741 return current_range.take();
2742 }
2743 })
2744}