use crate::diagnostic_set::{DiagnosticEntry, DiagnosticGroup};
pub use crate::{
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
    PLAIN_TEXT,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use collections::hash_map;
use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
use lsp::LanguageServer;
use parking_lot::Mutex;
use postage::{prelude::Stream, sink::Sink, watch};
use similar::{ChangeTag, TextDiff};
use smol::future::yield_now;
use std::{
    any::Any,
    cell::RefCell,
    cmp::{self, Ordering},
    collections::{BTreeMap, HashMap, HashSet},
    ffi::OsString,
    future::Future,
    iter::{Iterator, Peekable},
    ops::{Deref, DerefMut, Range},
    path::{Path, PathBuf},
    str,
    sync::Arc,
    time::{Duration, Instant, SystemTime, UNIX_EPOCH},
    vec,
};
use sum_tree::TreeMap;
use text::operation_queue::OperationQueue;
pub use text::{Buffer as TextBuffer, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
use util::{post_inc, TryFutureExt as _};

#[cfg(any(test, feature = "test-support"))]
pub use tree_sitter_rust;

pub use lsp::DiagnosticSeverity;

thread_local! {
    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}

lazy_static! {
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}

// TODO - Make this configurable
const INDENT_SIZE: u32 = 4;

pub struct Buffer {
    text: TextBuffer,
    file: Option<Box<dyn File>>,
    saved_version: clock::Global,
    saved_mtime: SystemTime,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_tree: Mutex<Option<SyntaxTree>>,
    parsing_in_background: bool,
    parse_count: usize,
    remote_selections: TreeMap<ReplicaId, Arc<[Selection<Anchor>]>>,
    diagnostics: DiagnosticSet,
    diagnostics_update_count: usize,
    next_diagnostic_group_id: usize,
    language_server: Option<LanguageServerState>,
    deferred_ops: OperationQueue<Operation>,
    #[cfg(test)]
    pub(crate) operations: Vec<Operation>,
}

pub struct BufferSnapshot {
    text: text::BufferSnapshot,
    tree: Option<Tree>,
    diagnostics: DiagnosticSet,
    remote_selections: TreeMap<ReplicaId, Arc<[Selection<Anchor>]>>,
    diagnostics_update_count: usize,
    is_parsing: bool,
    language: Option<Arc<Language>>,
    parse_count: usize,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Diagnostic {
    pub source: Option<String>,
    pub code: Option<String>,
    pub severity: DiagnosticSeverity,
    pub message: String,
    pub group_id: usize,
    pub is_valid: bool,
    pub is_primary: bool,
}

struct LanguageServerState {
    server: Arc<LanguageServer>,
    latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
    pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
    next_version: usize,
    _maintain_server: Task<Option<()>>,
}

#[derive(Clone)]
struct LanguageServerSnapshot {
    buffer_snapshot: text::BufferSnapshot,
    version: usize,
    path: Arc<Path>,
}

#[derive(Clone, Debug)]
pub enum Operation {
    Buffer(text::Operation),
    UpdateDiagnostics {
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
    UpdateSelections {
        replica_id: ReplicaId,
        selections: Arc<[Selection<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
    RemoveSelections {
        replica_id: ReplicaId,
        lamport_timestamp: clock::Lamport,
    },
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Event {
    Edited,
    Dirtied,
    Saved,
    FileHandleChanged,
    Reloaded,
    Reparsed,
    DiagnosticsUpdated,
    Closed,
}

pub trait File {
    fn worktree_id(&self) -> usize;

    fn entry_id(&self) -> Option<usize>;

    fn mtime(&self) -> SystemTime;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<Path>;

    /// Returns the absolute path of this file.
    fn abs_path(&self) -> Option<PathBuf>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self) -> PathBuf;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name(&self) -> Option<OsString>;

    fn is_deleted(&self) -> bool;

    fn save(
        &self,
        buffer_id: u64,
        text: Rope,
        version: clock::Global,
        cx: &mut MutableAppContext,
    ) -> Task<Result<(clock::Global, SystemTime)>>;

    fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;

    fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);

    fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);

    fn boxed_clone(&self) -> Box<dyn File>;

    fn as_any(&self) -> &dyn Any;
}

struct QueryCursorHandle(Option<QueryCursor>);

#[derive(Clone)]
struct SyntaxTree {
    tree: Tree,
    version: clock::Global,
}

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    edited: Vec<Anchor>,
    inserted: Option<Vec<Range<Anchor>>>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    indent: bool,
}

struct TextProvider<'a>(&'a Rope);

struct BufferChunkHighlights<'a> {
    captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
    next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
    stack: Vec<(usize, HighlightId)>,
    highlight_map: HighlightMap,
    theme: &'a SyntaxTheme,
    _query_cursor: QueryCursorHandle,
}

pub struct BufferChunks<'a> {
    range: Range<usize>,
    chunks: rope::Chunks<'a>,
    diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    highlights: Option<BufferChunkHighlights<'a>>,
}

#[derive(Clone, Copy, Debug, Default)]
pub struct Chunk<'a> {
    pub text: &'a str,
    pub highlight_style: Option<HighlightStyle>,
    pub diagnostic: Option<DiagnosticSeverity>,
}

pub(crate) struct Diff {
    base_version: clock::Global,
    new_text: Arc<str>,
    changes: Vec<(ChangeTag, usize)>,
}

#[derive(Clone, Copy)]
struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    severity: DiagnosticSeverity,
}

impl Buffer {
    pub fn new<T: Into<Arc<str>>>(
        replica_id: ReplicaId,
        base_text: T,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new(
                replica_id,
                cx.model_id() as u64,
                History::new(base_text.into()),
            ),
            None,
        )
    }

    pub fn from_file<T: Into<Arc<str>>>(
        replica_id: ReplicaId,
        base_text: T,
        file: Box<dyn File>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new(
                replica_id,
                cx.model_id() as u64,
                History::new(base_text.into()),
            ),
            Some(file),
        )
    }

    pub fn from_proto(
        replica_id: ReplicaId,
        message: proto::Buffer,
        file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Self> {
        let mut buffer =
            text::Buffer::new(replica_id, message.id, History::new(message.content.into()));
        let ops = message
            .history
            .into_iter()
            .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op)));
        buffer.apply_ops(ops)?;
        let mut this = Self::build(buffer, file);
        for selection_set in message.selections {
            this.remote_selections.insert(
                selection_set.replica_id as ReplicaId,
                proto::deserialize_selections(selection_set.selections),
            );
        }
        this.apply_diagnostic_update(
            Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
            cx,
        );

        Ok(this)
    }

    pub fn to_proto(&self) -> proto::Buffer {
        proto::Buffer {
            id: self.remote_id(),
            content: self.text.base_text().to_string(),
            history: self
                .text
                .history()
                .map(proto::serialize_edit_operation)
                .collect(),
            selections: self
                .remote_selections
                .iter()
                .map(|(replica_id, selections)| proto::SelectionSet {
                    replica_id: *replica_id as u32,
                    selections: proto::serialize_selections(selections),
                })
                .collect(),
            diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
        }
    }

    pub fn with_language(
        mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        self.set_language(language, language_server, cx);
        self
    }

    fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
        let saved_mtime;
        if let Some(file) = file.as_ref() {
            saved_mtime = file.mtime();
        } else {
            saved_mtime = UNIX_EPOCH;
        }

        Self {
            saved_mtime,
            saved_version: buffer.version(),
            text: buffer,
            file,
            syntax_tree: Mutex::new(None),
            parsing_in_background: false,
            parse_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            autoindent_requests: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_update_count: 0,
            next_diagnostic_group_id: 0,
            language_server: None,
            deferred_ops: OperationQueue::new(),
            #[cfg(test)]
            operations: Default::default(),
        }
    }

    pub fn snapshot(&self) -> BufferSnapshot {
        BufferSnapshot {
            text: self.text.snapshot(),
            tree: self.syntax_tree(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.parsing_in_background,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }

    pub fn file(&self) -> Option<&dyn File> {
        self.file.as_deref()
    }

    pub fn save(
        &mut self,
        cx: &mut ModelContext<Self>,
    ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
        let file = self
            .file
            .as_ref()
            .ok_or_else(|| anyhow!("buffer has no file"))?;
        let text = self.as_rope().clone();
        let version = self.version();
        let save = file.save(self.remote_id(), text, version, cx.as_mut());
        Ok(cx.spawn(|this, mut cx| async move {
            let (version, mtime) = save.await?;
            this.update(&mut cx, |this, cx| {
                this.did_save(version.clone(), mtime, None, cx);
            });
            Ok((version, mtime))
        }))
    }

    pub fn set_language(
        &mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<lsp::LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.language = language;
        self.language_server = if let Some(server) = language_server {
            let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
            Some(LanguageServerState {
                latest_snapshot: latest_snapshot_tx,
                pending_snapshots: Default::default(),
                next_version: 0,
                server: server.clone(),
                _maintain_server: cx.background().spawn(
                    async move {
                        let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
                        while let Some(snapshot) = latest_snapshot_rx.recv().await {
                            if let Some(snapshot) = snapshot {
                                let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
                                if let Some(prev_snapshot) = prev_snapshot {
                                    let changes = lsp::DidChangeTextDocumentParams {
                                        text_document: lsp::VersionedTextDocumentIdentifier::new(
                                            uri,
                                            snapshot.version as i32,
                                        ),
                                        content_changes: snapshot
                                            .buffer_snapshot
                                            .edits_since::<(PointUtf16, usize)>(
                                                prev_snapshot.buffer_snapshot.version(),
                                            )
                                            .map(|edit| {
                                                let edit_start = edit.new.start.0;
                                                let edit_end = edit_start
                                                    + (edit.old.end.0 - edit.old.start.0);
                                                let new_text = snapshot
                                                    .buffer_snapshot
                                                    .text_for_range(
                                                        edit.new.start.1..edit.new.end.1,
                                                    )
                                                    .collect();
                                                lsp::TextDocumentContentChangeEvent {
                                                    range: Some(lsp::Range::new(
                                                        lsp::Position::new(
                                                            edit_start.row,
                                                            edit_start.column,
                                                        ),
                                                        lsp::Position::new(
                                                            edit_end.row,
                                                            edit_end.column,
                                                        ),
                                                    )),
                                                    range_length: None,
                                                    text: new_text,
                                                }
                                            })
                                            .collect(),
                                    };
                                    server
                                        .notify::<lsp::notification::DidChangeTextDocument>(changes)
                                        .await?;
                                } else {
                                    server
                                        .notify::<lsp::notification::DidOpenTextDocument>(
                                            lsp::DidOpenTextDocumentParams {
                                                text_document: lsp::TextDocumentItem::new(
                                                    uri,
                                                    Default::default(),
                                                    snapshot.version as i32,
                                                    snapshot.buffer_snapshot.text().to_string(),
                                                ),
                                            },
                                        )
                                        .await?;
                                }

                                prev_snapshot = Some(snapshot);
                            }
                        }
                        Ok(())
                    }
                    .log_err(),
                ),
            })
        } else {
            None
        };

        self.reparse(cx);
        self.update_language_server();
    }

    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: SystemTime,
        new_file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.saved_mtime = mtime;
        self.saved_version = version;
        if let Some(new_file) = new_file {
            self.file = Some(new_file);
        }
        if let Some(state) = &self.language_server {
            cx.background()
                .spawn(
                    state
                        .server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: lsp::TextDocumentIdentifier {
                                    uri: lsp::Url::from_file_path(
                                        self.file.as_ref().unwrap().abs_path().unwrap(),
                                    )
                                    .unwrap(),
                                },
                                text: None,
                            },
                        ),
                )
                .detach()
        }
        cx.emit(Event::Saved);
    }

    pub fn file_updated(
        &mut self,
        new_file: Box<dyn File>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<()>> {
        let old_file = self.file.as_ref()?;
        let mut file_changed = false;
        let mut task = None;

        if new_file.path() != old_file.path() {
            file_changed = true;
        }

        if new_file.is_deleted() {
            if !old_file.is_deleted() {
                file_changed = true;
                if !self.is_dirty() {
                    cx.emit(Event::Dirtied);
                }
            }
        } else {
            let new_mtime = new_file.mtime();
            if new_mtime != old_file.mtime() {
                file_changed = true;

                if !self.is_dirty() {
                    task = Some(cx.spawn(|this, mut cx| {
                        async move {
                            let new_text = this.read_with(&cx, |this, cx| {
                                this.file.as_ref().and_then(|file| file.load_local(cx))
                            });
                            if let Some(new_text) = new_text {
                                let new_text = new_text.await?;
                                let diff = this
                                    .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
                                    .await;
                                this.update(&mut cx, |this, cx| {
                                    if this.apply_diff(diff, cx) {
                                        this.saved_version = this.version();
                                        this.saved_mtime = new_mtime;
                                        cx.emit(Event::Reloaded);
                                    }
                                });
                            }
                            Ok(())
                        }
                        .log_err()
                        .map(drop)
                    }));
                }
            }
        }

        if file_changed {
            cx.emit(Event::FileHandleChanged);
        }
        self.file = Some(new_file);
        task
    }

    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }

    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    pub fn parse_count(&self) -> usize {
        self.parse_count
    }

    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    pub(crate) fn syntax_tree(&self) -> Option<Tree> {
        if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
            self.interpolate_tree(syntax_tree);
            Some(syntax_tree.tree.clone())
        } else {
            None
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.parsing_in_background
    }

    #[cfg(test)]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }

    fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
        if self.parsing_in_background {
            return false;
        }

        if let Some(grammar) = self.grammar().cloned() {
            let old_tree = self.syntax_tree();
            let text = self.as_rope().clone();
            let parsed_version = self.version();
            let parse_task = cx.background().spawn({
                let grammar = grammar.clone();
                async move { Self::parse_text(&text, old_tree, &grammar) }
            });

            match cx
                .background()
                .block_with_timeout(self.sync_parse_timeout, parse_task)
            {
                Ok(new_tree) => {
                    self.did_finish_parsing(new_tree, parsed_version, cx);
                    return true;
                }
                Err(parse_task) => {
                    self.parsing_in_background = true;
                    cx.spawn(move |this, mut cx| async move {
                        let new_tree = parse_task.await;
                        this.update(&mut cx, move |this, cx| {
                            let grammar_changed = this
                                .grammar()
                                .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
                            let parse_again = this.version.gt(&parsed_version) || grammar_changed;
                            this.parsing_in_background = false;
                            this.did_finish_parsing(new_tree, parsed_version, cx);

                            if parse_again && this.reparse(cx) {
                                return;
                            }
                        });
                    })
                    .detach();
                }
            }
        }
        false
    }

    fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
        PARSER.with(|parser| {
            let mut parser = parser.borrow_mut();
            parser
                .set_language(grammar.ts_language)
                .expect("incompatible grammar");
            let mut chunks = text.chunks_in_range(0..text.len());
            let tree = parser
                .parse_with(
                    &mut move |offset, _| {
                        chunks.seek(offset);
                        chunks.next().unwrap_or("").as_bytes()
                    },
                    old_tree.as_ref(),
                )
                .unwrap();
            tree
        })
    }

    fn interpolate_tree(&self, tree: &mut SyntaxTree) {
        for edit in self.edits_since::<(usize, Point)>(&tree.version) {
            let (bytes, lines) = edit.flatten();
            tree.tree.edit(&InputEdit {
                start_byte: bytes.new.start,
                old_end_byte: bytes.new.start + bytes.old.len(),
                new_end_byte: bytes.new.end,
                start_position: lines.new.start.to_ts_point(),
                old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
                    .to_ts_point(),
                new_end_position: lines.new.end.to_ts_point(),
            });
        }
        tree.version = self.version();
    }

    fn did_finish_parsing(
        &mut self,
        tree: Tree,
        version: clock::Global,
        cx: &mut ModelContext<Self>,
    ) {
        self.parse_count += 1;
        *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
        self.request_autoindent(cx);
        cx.emit(Event::Reparsed);
        cx.notify();
    }

    pub fn update_diagnostics(
        &mut self,
        version: Option<i32>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Operation> {
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.source.cmp(&b.source))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let version = version.map(|version| version as usize);
        let content = if let Some(version) = version {
            let language_server = self.language_server.as_mut().unwrap();
            language_server
                .pending_snapshots
                .retain(|&v, _| v >= version);
            let snapshot = language_server
                .pending_snapshots
                .get(&version)
                .ok_or_else(|| anyhow!("missing snapshot"))?;
            &snapshot.buffer_snapshot
        } else {
            self.deref()
        };

        let empty_set = HashSet::new();
        let disk_based_sources = self
            .language
            .as_ref()
            .and_then(|language| language.disk_based_diagnostic_sources())
            .unwrap_or(&empty_set);

        let mut edits_since_save = content
            .edits_since::<PointUtf16>(&self.saved_version)
            .peekable();
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        let mut has_disk_based_diagnostics = false;
        let mut ix = 0;
        'outer: while ix < diagnostics.len() {
            let entry = &mut diagnostics[ix];
            let mut start = entry.range.start;
            let mut end = entry.range.end;

            // Some diagnostics are based on files on disk instead of buffers'
            // current contents. Adjust these diagnostics' ranges to reflect
            // any unsaved edits.
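            //
            // For example (hypothetical): if a disk-based diagnostic covers row 10 of the
            // saved file and an unsaved edit has inserted two lines above row 10, the
            // diagnostic is translated down to row 12 so it lines up with the current text.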
            if entry
                .diagnostic
                .source
                .as_ref()
                .map_or(false, |source| disk_based_sources.contains(source))
            {
                has_disk_based_diagnostics = true;
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        diagnostics.remove(ix);
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                start = last_edit_new_end + (start - last_edit_old_end);
                end = last_edit_new_end + (end - last_edit_old_end);
            }

            entry.range = content.clip_point_utf16(start, Bias::Left)
                ..content.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if entry.range.start == entry.range.end {
                entry.range.end.column += 1;
                entry.range.end = content.clip_point_utf16(entry.range.end, Bias::Right);
                if entry.range.start == entry.range.end && entry.range.end.column > 0 {
                    entry.range.start.column -= 1;
                    entry.range.start = content.clip_point_utf16(entry.range.start, Bias::Left);
                }
            }
            ix += 1;
        }
        drop(edits_since_save);

        let mut merged_diagnostics = Vec::with_capacity(diagnostics.len());
        let mut old_diagnostics = self
            .diagnostics
            .iter()
            .map(|entry| {
                (
                    entry,
                    entry
                        .diagnostic
                        .source
                        .as_ref()
                        .map_or(false, |source| disk_based_sources.contains(source)),
                )
            })
            .peekable();
        let mut new_diagnostics = diagnostics
            .into_iter()
            .map(|entry| DiagnosticEntry {
                range: content.anchor_before(entry.range.start)
                    ..content.anchor_after(entry.range.end),
                diagnostic: entry.diagnostic,
            })
            .peekable();

        // Compare the old and new diagnostics for two reasons.
        // 1. Recycling group ids - diagnostic groups whose primary diagnostic has not
        //    changed should use the same group id as before, so that downstream code
        //    can determine which diagnostics are new.
        // 2. Preserving disk-based diagnostics - These diagnostic sources are reported
        //    on a less frequent basis than others. If these sources are absent from this
        //    message, then preserve the previous diagnostics for those sources, but mark
        //    them as invalid, and set a time to clear them out.
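        //
        // For example (hypothetical): if a previous update included an error from a
        // disk-based source such as `rustc`, and this update contains no diagnostics from
        // that source, the old entry is kept but marked `is_valid: false` rather than
        // being dropped immediately.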
        let mut group_id_replacements = HashMap::new();
        let mut merged_old_disk_based_diagnostics = false;
        loop {
            match (old_diagnostics.peek(), new_diagnostics.peek()) {
                (None, None) => break,
                (None, Some(_)) => {
                    merged_diagnostics.push(new_diagnostics.next().unwrap());
                }
                (Some(_), None) => {
                    let (old_entry, is_disk_based) = old_diagnostics.next().unwrap();
                    if is_disk_based && !has_disk_based_diagnostics {
                        let mut old_entry = old_entry.clone();
                        old_entry.diagnostic.is_valid = false;
                        merged_old_disk_based_diagnostics = true;
                        merged_diagnostics.push(old_entry);
                    }
                }
                (Some((old, _)), Some(new)) => {
                    let ordering = Ordering::Equal
                        .then_with(|| old.range.start.cmp(&new.range.start, content).unwrap())
                        .then_with(|| new.range.end.cmp(&old.range.end, content).unwrap())
                        .then_with(|| compare_diagnostics(&old.diagnostic, &new.diagnostic));
                    match ordering {
                        Ordering::Less => {
                            let (old_entry, is_disk_based) = old_diagnostics.next().unwrap();
                            if is_disk_based && !has_disk_based_diagnostics {
                                let mut old_entry = old_entry.clone();
                                old_entry.diagnostic.is_valid = false;
                                merged_old_disk_based_diagnostics = true;
                                merged_diagnostics.push(old_entry);
                            }
                        }
                        Ordering::Equal => {
                            let (old_entry, _) = old_diagnostics.next().unwrap();
                            let new_entry = new_diagnostics.next().unwrap();
                            if new_entry.diagnostic.is_primary {
                                group_id_replacements.insert(
                                    new_entry.diagnostic.group_id,
                                    old_entry.diagnostic.group_id,
                                );
                            }
                            merged_diagnostics.push(new_entry);
                        }
                        Ordering::Greater => {
                            let new_entry = new_diagnostics.next().unwrap();
                            merged_diagnostics.push(new_entry);
                        }
                    }
                }
            }
        }
        drop(old_diagnostics);

        // Having determined which group ids should be recycled, renumber all of the
        // groups. Any new group that does not correspond to an old group receives
        // a brand new group id.
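        //
        // For example (hypothetical): if the primary diagnostic of incoming group 7
        // matches the primary diagnostic of existing group 3, every entry in group 7 is
        // renumbered to 3; an incoming group with no such match gets the next unused id.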
        let mut next_diagnostic_group_id = self.next_diagnostic_group_id;
        for entry in &mut merged_diagnostics {
            if entry.diagnostic.is_valid {
                match group_id_replacements.entry(entry.diagnostic.group_id) {
                    hash_map::Entry::Occupied(e) => entry.diagnostic.group_id = *e.get(),
                    hash_map::Entry::Vacant(e) => {
                        entry.diagnostic.group_id = post_inc(&mut next_diagnostic_group_id);
                        e.insert(entry.diagnostic.group_id);
                    }
                }
            }
        }

        self.diagnostics = DiagnosticSet::from_sorted_entries(merged_diagnostics, content);
        self.diagnostics_update_count += 1;
        self.next_diagnostic_group_id = next_diagnostic_group_id;

        if merged_old_disk_based_diagnostics {
            // TODO - spawn a task to clear the old ones
        }

        cx.notify();
        cx.emit(Event::DiagnosticsUpdated);
        Ok(Operation::UpdateDiagnostics {
            diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
            lamport_timestamp: self.text.lamport_clock.tick(),
        })
    }

    fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(indent_columns) = self.compute_autoindents() {
            let indent_columns = cx.background().spawn(indent_columns);
            match cx
                .background()
                .block_with_timeout(Duration::from_micros(500), indent_columns)
            {
                Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
                Err(indent_columns) => {
                    self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
                        let indent_columns = indent_columns.await;
                        this.update(&mut cx, |this, cx| {
                            this.apply_autoindents(indent_columns, cx);
                        });
                    }));
                }
            }
        }
    }

    fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.language.is_none()
            || snapshot.tree.is_none()
            || self.autoindent_requests.is_empty()
        {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_columns = BTreeMap::new();
            for request in autoindent_requests {
                let old_to_new_rows = request
                    .edited
                    .iter()
                    .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
                    .zip(
                        request
                            .edited
                            .iter()
                            .map(|anchor| anchor.summary::<Point>(&snapshot).row),
                    )
                    .collect::<BTreeMap<u32, u32>>();

                let mut old_suggestions = HashMap::<u32, u32>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        let indentation_basis = old_to_new_rows
                            .get(&suggestion.basis_row)
                            .and_then(|from_row| old_suggestions.get(from_row).copied())
                            .unwrap_or_else(|| {
                                request
                                    .before_edit
                                    .indent_column_for_line(suggestion.basis_row)
                            });
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        old_suggestions.insert(
                            *old_to_new_rows.get(&old_row).unwrap(),
                            indentation_basis + delta,
                        );
                    }
                    yield_now().await;
                }

                // At this point, old_suggestions contains the suggested indentation for
                // all edited lines with respect to the state of the buffer before the
                // edit, but keyed by the row for these lines after the edits were applied.
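                //
                // For example (hypothetical): if the edit pushed old row 3 down to row 5,
                // the suggestion computed against the pre-edit buffer for row 3 is stored
                // in old_suggestions under the key 5.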
                let new_edited_row_ranges =
                    contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
                for new_edited_row_range in new_edited_row_ranges {
                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        let new_indentation = indent_columns
                            .get(&suggestion.basis_row)
                            .copied()
                            .unwrap_or_else(|| {
                                snapshot.indent_column_for_line(suggestion.basis_row)
                            })
                            + delta;
                        if old_suggestions
                            .get(&new_row)
                            .map_or(true, |old_indentation| new_indentation != *old_indentation)
                        {
                            indent_columns.insert(new_row, new_indentation);
                        }
                    }
                    yield_now().await;
                }

                if let Some(inserted) = request.inserted.as_ref() {
                    let inserted_row_ranges = contiguous_ranges(
                        inserted
                            .iter()
                            .map(|range| range.to_point(&snapshot))
                            .flat_map(|range| range.start.row..range.end.row + 1),
                        max_rows_between_yields,
                    );
                    for inserted_row_range in inserted_row_ranges {
                        let suggestions = snapshot
                            .suggest_autoindents(inserted_row_range.clone())
                            .into_iter()
                            .flatten();
                        for (row, suggestion) in inserted_row_range.zip(suggestions) {
                            let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                            let new_indentation = indent_columns
                                .get(&suggestion.basis_row)
                                .copied()
                                .unwrap_or_else(|| {
                                    snapshot.indent_column_for_line(suggestion.basis_row)
                                })
                                + delta;
                            indent_columns.insert(row, new_indentation);
                        }
                        yield_now().await;
                    }
                }
            }
            indent_columns
        })
    }

    fn apply_autoindents(
        &mut self,
        indent_columns: BTreeMap<u32, u32>,
        cx: &mut ModelContext<Self>,
    ) {
        self.start_transaction();
        for (row, indent_column) in &indent_columns {
            self.set_indent_column_for_line(*row, *indent_column, cx);
        }
        self.end_transaction(cx);
    }

    fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
        let current_column = self.indent_column_for_line(row);
        if column > current_column {
            let offset = Point::new(row, 0).to_offset(&*self);
            self.edit(
                [offset..offset],
                " ".repeat((column - current_column) as usize),
                cx,
            );
        } else if column < current_column {
            self.edit(
                [Point::new(row, 0)..Point::new(row, current_column - column)],
                "",
                cx,
            );
        }
    }

    pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
        // TODO: it would be nice to not allocate here.
        let old_text = self.text();
        let base_version = self.version();
        cx.background().spawn(async move {
            let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
                .iter_all_changes()
                .map(|c| (c.tag(), c.value().len()))
                .collect::<Vec<_>>();
            Diff {
                base_version,
                new_text,
                changes,
            }
        })
    }

    pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
        if self.version == diff.base_version {
            self.start_transaction();
            let mut offset = 0;
            for (tag, len) in diff.changes {
                let range = offset..(offset + len);
                match tag {
                    ChangeTag::Equal => offset += len,
                    ChangeTag::Delete => self.edit(Some(range), "", cx),
                    ChangeTag::Insert => {
                        self.edit(Some(offset..offset), &diff.new_text[range], cx);
                        offset += len;
                    }
                }
            }
            self.end_transaction(cx);
            true
        } else {
            false
        }
    }

    pub fn is_dirty(&self) -> bool {
        !self.saved_version.ge(&self.version)
            || self.file.as_ref().map_or(false, |file| file.is_deleted())
    }

    pub fn has_conflict(&self) -> bool {
        !self.saved_version.ge(&self.version)
            && self
                .file
                .as_ref()
                .map_or(false, |file| file.mtime() > self.saved_mtime)
    }

    pub fn subscribe(&mut self) -> Subscription {
        self.text.subscribe()
    }

    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }

    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.text.start_transaction_at(now)
    }

    pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        self.end_transaction_at(Instant::now(), cx)
    }

    pub fn end_transaction_at(
        &mut self,
        now: Instant,
        cx: &mut ModelContext<Self>,
    ) -> Option<TransactionId> {
        if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
            let was_dirty = start_version != self.saved_version;
            self.did_edit(&start_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }

    pub fn set_active_selections(
        &mut self,
        selections: Arc<[Selection<Anchor>]>,
        cx: &mut ModelContext<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        self.remote_selections
            .insert(self.text.replica_id(), selections.clone());
        self.send_operation(
            Operation::UpdateSelections {
                replica_id: self.text.replica_id(),
                selections,
                lamport_timestamp,
            },
            cx,
        );
    }

    pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        self.send_operation(
            Operation::RemoveSelections {
                replica_id: self.text.replica_id(),
                lamport_timestamp,
            },
            cx,
        );
    }

    fn update_language_server(&mut self) {
        let language_server = if let Some(language_server) = self.language_server.as_mut() {
            language_server
        } else {
            return;
        };
        let abs_path = self
            .file
            .as_ref()
            .map_or(Path::new("/").to_path_buf(), |file| {
                file.abs_path().unwrap()
            });

        let version = post_inc(&mut language_server.next_version);
        let snapshot = LanguageServerSnapshot {
            buffer_snapshot: self.text.snapshot(),
            version,
            path: Arc::from(abs_path),
        };
        language_server
            .pending_snapshots
            .insert(version, snapshot.clone());
        let _ = language_server
            .latest_snapshot
            .blocking_send(Some(snapshot));
    }

    pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
    where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, false, cx)
    }

    pub fn edit_with_autoindent<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, true, cx)
    }

    pub fn edit_internal<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        autoindent: bool,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        let new_text = new_text.into();

        // Skip invalid ranges and coalesce contiguous ones.
        let mut ranges: Vec<Range<usize>> = Vec::new();
        for range in ranges_iter {
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            if !new_text.is_empty() || !range.is_empty() {
                if let Some(prev_range) = ranges.last_mut() {
                    if prev_range.end >= range.start {
                        prev_range.end = cmp::max(prev_range.end, range.end);
                    } else {
                        ranges.push(range);
                    }
                } else {
                    ranges.push(range);
                }
            }
        }
        if ranges.is_empty() {
            return;
        }

        self.start_transaction();
        self.pending_autoindent.take();
        let autoindent_request = if autoindent && self.language.is_some() {
            let before_edit = self.snapshot();
            let edited = ranges
                .iter()
                .filter_map(|range| {
                    let start = range.start.to_point(self);
                    if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
                        None
                    } else {
                        Some(self.anchor_before(range.start))
                    }
                })
                .collect();
            Some((before_edit, edited))
        } else {
            None
        };

        let first_newline_ix = new_text.find('\n');
        let new_text_len = new_text.len();

        let edit = self.text.edit(ranges.iter().cloned(), new_text);

        if let Some((before_edit, edited)) = autoindent_request {
            let mut inserted = None;
            if let Some(first_newline_ix) = first_newline_ix {
                let mut delta = 0isize;
                inserted = Some(
                    ranges
                        .iter()
                        .map(|range| {
                            let start =
                                (delta + range.start as isize) as usize + first_newline_ix + 1;
                            let end = (delta + range.start as isize) as usize + new_text_len;
                            delta +=
                                (range.end as isize - range.start as isize) + new_text_len as isize;
                            self.anchor_before(start)..self.anchor_after(end)
                        })
                        .collect(),
                );
            }

            self.autoindent_requests.push(Arc::new(AutoindentRequest {
                before_edit,
                edited,
                inserted,
            }));
        }

        self.end_transaction(cx);
        self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
    }

    fn did_edit(
        &mut self,
        old_version: &clock::Global,
        was_dirty: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if self.edits_since::<usize>(old_version).next().is_none() {
            return;
        }

        self.reparse(cx);
        self.update_language_server();

        cx.emit(Event::Edited);
        if !was_dirty {
            cx.emit(Event::Dirtied);
        }
        cx.notify();
    }

    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language.as_ref().and_then(|l| l.grammar.as_ref())
    }

    pub fn apply_ops<I: IntoIterator<Item = Operation>>(
        &mut self,
        ops: I,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.pending_autoindent.take();
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        let mut deferred_ops = Vec::new();
        let buffer_ops = ops
            .into_iter()
            .filter_map(|op| match op {
                Operation::Buffer(op) => Some(op),
                _ => {
                    if self.can_apply_op(&op) {
                        self.apply_op(op, cx);
                    } else {
                        deferred_ops.push(op);
                    }
                    None
                }
            })
            .collect::<Vec<_>>();
        self.text.apply_ops(buffer_ops)?;
        self.flush_deferred_ops(cx);
        self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited as the operations could include a
        // selection update.
        cx.notify();
        Ok(())
    }

    fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
        let mut deferred_ops = Vec::new();
        for op in self.deferred_ops.drain().iter().cloned() {
            if self.can_apply_op(&op) {
                self.apply_op(op, cx);
            } else {
                deferred_ops.push(op);
            }
        }
        self.deferred_ops.insert(deferred_ops);
    }

    fn can_apply_op(&self, operation: &Operation) -> bool {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics { diagnostics, .. } => {
                diagnostics.iter().all(|diagnostic| {
                    self.text.can_resolve(&diagnostic.range.start)
                        && self.text.can_resolve(&diagnostic.range.end)
                })
            }
            Operation::UpdateSelections { selections, .. } => selections
                .iter()
                .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
            Operation::RemoveSelections { .. } => true,
        }
    }

    fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics { diagnostics, .. } => {
                self.apply_diagnostic_update(diagnostics, cx);
            }
            Operation::UpdateSelections {
                replica_id,
                selections,
                lamport_timestamp,
            } => {
                self.remote_selections.insert(replica_id, selections);
                self.text.lamport_clock.observe(lamport_timestamp);
            }
            Operation::RemoveSelections {
                replica_id,
                lamport_timestamp,
            } => {
                self.remote_selections.remove(&replica_id);
                self.text.lamport_clock.observe(lamport_timestamp);
            }
        }
    }

    fn apply_diagnostic_update(
        &mut self,
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        cx: &mut ModelContext<Self>,
    ) {
        self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
        self.diagnostics_update_count += 1;
        cx.notify();
    }

    #[cfg(not(test))]
    pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
        if let Some(file) = &self.file {
            file.buffer_updated(self.remote_id(), operation, cx.as_mut());
        }
    }

    #[cfg(test)]
    pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
        self.operations.push(operation);
    }

    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
        self.remote_selections.remove(&replica_id);
        cx.notify();
    }

    pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some((transaction_id, operation)) = self.text.undo() {
            self.send_operation(Operation::Buffer(operation), cx);
            self.did_edit(&old_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }

    pub fn undo_transaction(
        &mut self,
        transaction_id: TransactionId,
        cx: &mut ModelContext<Self>,
    ) -> bool {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some(operation) = self.text.undo_transaction(transaction_id) {
            self.send_operation(Operation::Buffer(operation), cx);
            self.did_edit(&old_version, was_dirty, cx);
            true
        } else {
            false
        }
    }

    pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some((transaction_id, operation)) = self.text.redo() {
            self.send_operation(Operation::Buffer(operation), cx);
            self.did_edit(&old_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }

    pub fn redo_transaction(
        &mut self,
        transaction_id: TransactionId,
        cx: &mut ModelContext<Self>,
    ) -> bool {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some(operation) = self.text.redo_transaction(transaction_id) {
            self.send_operation(Operation::Buffer(operation), cx);
            self.did_edit(&old_version, was_dirty, cx);
            true
        } else {
            false
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        old_range_count: usize,
        cx: &mut ModelContext<Self>,
    ) where
        T: rand::Rng,
    {
        self.start_transaction();
        self.text.randomly_edit(rng, old_range_count);
        self.end_transaction(cx);
    }
}

impl Entity for Buffer {
    type Event = Event;

    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
        if let Some(file) = self.file.as_ref() {
            file.buffer_removed(self.remote_id(), cx);
        }
    }
}

impl Deref for Buffer {
    type Target = TextBuffer;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

impl BufferSnapshot {
    fn suggest_autoindents<'a>(
        &'a self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
        let mut query_cursor = QueryCursorHandle::new();
        if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
            let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

            // Get the "indentation ranges" that intersect this row range.
            let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
            let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
            query_cursor.set_point_range(
                Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
                    ..Point::new(row_range.end, 0).to_ts_point(),
            );
            let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
            for mat in query_cursor.matches(
                &grammar.indents_query,
                tree.root_node(),
                TextProvider(self.as_rope()),
            ) {
                let mut node_kind = "";
                let mut start: Option<Point> = None;
                let mut end: Option<Point> = None;
                for capture in mat.captures {
                    if Some(capture.index) == indent_capture_ix {
                        node_kind = capture.node.kind();
                        start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                        end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                    } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position()));
                    }
                }

                if let Some((start, end)) = start.zip(end) {
                    if start.row == end.row {
                        continue;
                    }

                    let range = start..end;
                    match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
                        Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
                        Ok(ix) => {
                            let prev_range = &mut indentation_ranges[ix];
                            prev_range.0.end = prev_range.0.end.max(range.end);
                        }
                    }
                }
            }

            let mut prev_row = prev_non_blank_row.unwrap_or(0);
            Some(row_range.map(move |row| {
                let row_start = Point::new(row, self.indent_column_for_line(row));

                let mut indent_from_prev_row = false;
                let mut outdent_to_row = u32::MAX;
                for (range, _node_kind) in &indentation_ranges {
                    if range.start.row >= row {
                        break;
                    }

                    if range.start.row == prev_row && range.end > row_start {
                        indent_from_prev_row = true;
                    }
                    if range.end.row >= prev_row && range.end <= row_start {
                        outdent_to_row = outdent_to_row.min(range.start.row);
                    }
                }

                let suggestion = if outdent_to_row == prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                } else if indent_from_prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: true,
                    }
                } else if outdent_to_row < prev_row {
                    IndentSuggestion {
                        basis_row: outdent_to_row,
                        indent: false,
                    }
                } else {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                };

                prev_row = row;
                suggestion
            }))
        } else {
            None
        }
    }

    fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
        while row > 0 {
            row -= 1;
            if !self.is_line_blank(row) {
                return Some(row);
            }
        }
        None
    }

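    /// Returns an iterator over chunks of text in the given range.
    ///
    /// When a `theme` is provided, each chunk is annotated with its syntax highlight
    /// style and the most severe diagnostic overlapping it.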
    pub fn chunks<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
        theme: Option<&'a SyntaxTheme>,
    ) -> BufferChunks<'a> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);

        let mut highlights = None;
        let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
        if let Some(theme) = theme {
            for entry in self
                .diagnostics
                .range::<_, usize>(range.clone(), self, true)
            {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                });
            }
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));

            if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
                let mut query_cursor = QueryCursorHandle::new();

                // TODO - add a Tree-sitter API to remove the need for this.
                let cursor = unsafe {
                    std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
                };
                let captures = cursor.set_byte_range(range.clone()).captures(
                    &grammar.highlights_query,
                    tree.root_node(),
                    TextProvider(self.text.as_rope()),
                );
                highlights = Some(BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_map: grammar.highlight_map(),
                    _query_cursor: query_cursor,
                    theme,
                })
            }
        }

        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
        let chunks = self.text.as_rope().chunks_in_range(range.clone());

        BufferChunks {
            range,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            highlights,
        }
    }

    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language
            .as_ref()
            .and_then(|language| language.grammar.as_ref())
    }

    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
        if let Some(tree) = self.tree.as_ref() {
            let root = tree.root_node();
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            let mut node = root.descendant_for_byte_range(range.start, range.end);
            while node.map_or(false, |n| n.byte_range() == range) {
                node = node.unwrap().parent();
            }
            node.map(|n| n.byte_range())
        } else {
            None
        }
    }

    pub fn enclosing_bracket_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<usize>, Range<usize>)> {
        let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;

        // Find bracket pairs that *inclusively* contain the given range.
        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
        let mut cursor = QueryCursorHandle::new();
        let matches = cursor.set_byte_range(range).matches(
            &grammar.brackets_query,
            tree.root_node(),
            TextProvider(self.as_rope()),
        );

        // Get the ranges of the innermost pair of brackets.
        matches
            .filter_map(|mat| {
                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
                Some((open.byte_range(), close.byte_range()))
            })
            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
    }

    pub fn remote_selections_in_range<'a>(
        &'a self,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = (ReplicaId, impl 'a + Iterator<Item = &'a Selection<Anchor>>)>
    {
        self.remote_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != self.text.replica_id())
            .map(move |(replica_id, selections)| {
                let start_ix = match selections
                    .binary_search_by(|probe| probe.end.cmp(&range.start, self).unwrap())
                {
                    Ok(ix) | Err(ix) => ix,
                };
                let end_ix = match selections
                    .binary_search_by(|probe| probe.start.cmp(&range.end, self).unwrap())
                {
                    Ok(ix) | Err(ix) => ix,
                };

                (*replica_id, selections[start_ix..end_ix].iter())
            })
    }

    pub fn all_diagnostics<'a, O>(&'a self) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics
            .iter()
            .map(|diagnostic| diagnostic.resolve(self))
    }

    pub fn diagnostics_in_range<'a, T, O>(
        &'a self,
        search_range: Range<T>,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        T: 'a + ToOffset,
        O: 'a + FromAnchor,
    {
        self.diagnostics.range(search_range, self, true)
    }

    pub fn diagnostic_groups<O>(&self) -> Vec<DiagnosticGroup<O>>
    where
        O: FromAnchor + Ord + Copy,
    {
        self.diagnostics.groups(self)
    }

    pub fn diagnostic_group<'a, O>(
        &'a self,
        group_id: usize,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics.group(group_id, self)
    }

    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    pub fn parse_count(&self) -> usize {
        self.parse_count
    }
}

impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            tree: self.tree.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.is_parsing,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }
}

impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        ByteChunks(self.0.chunks_in_range(node.byte_range()))
    }
}

struct ByteChunks<'a>(rope::Chunks<'a>);

impl<'a> Iterator for ByteChunks<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(str::as_bytes)
    }
}

unsafe impl<'a> Send for BufferChunks<'a> {}

impl<'a> BufferChunks<'a> {
    pub fn seek(&mut self, offset: usize) {
        self.range.start = offset;
        self.chunks.seek(self.range.start);
        if let Some(highlights) = self.highlights.as_mut() {
            highlights
                .stack
                .retain(|(end_offset, _)| *end_offset > offset);
            if let Some((mat, capture_ix)) = &highlights.next_capture {
                let capture = mat.captures[*capture_ix as usize];
                if offset >= capture.node.start_byte() {
                    let next_capture_end = capture.node.end_byte();
                    if offset < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_map.get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            }
            highlights.captures.set_byte_range(self.range.clone());
        }
    }

    pub fn offset(&self) -> usize {
        self.range.start
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }
    }

    fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
                let capture = mat.captures[*capture_ix as usize];
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id = highlights.highlight_map.get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
            if endpoint.offset <= self.range.start {
                self.update_diagnostic_depths(endpoint);
                self.diagnostic_endpoints.next();
            } else {
                next_diagnostic_endpoint = endpoint.offset;
                break;
            }
        }

        if let Some(chunk) = self.chunks.peek() {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_style = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
                    highlight_style = parent_highlight_id.style(highlights.theme);
                }
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                highlight_style,
                diagnostic: self.current_diagnostic_severity(),
            })
        } else {
            None
        }
    }
}

impl QueryCursorHandle {
    fn new() -> Self {
        QueryCursorHandle(Some(
            QUERY_CURSORS
                .lock()
                .pop()
                .unwrap_or_else(|| QueryCursor::new()),
        ))
    }
}

impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        self.0.as_ref().unwrap()
    }
}

impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.0.as_mut().unwrap()
    }
}

impl Drop for QueryCursorHandle {
    fn drop(&mut self) {
        let mut cursor = self.0.take().unwrap();
        cursor.set_byte_range(0..usize::MAX);
        cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
        QUERY_CURSORS.lock().push(cursor)
    }
}

trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}

impl ToTreeSitterPoint for Point {
    fn to_ts_point(self) -> tree_sitter::Point {
        tree_sitter::Point::new(self.row as usize, self.column as usize)
    }

    fn from_ts_point(point: tree_sitter::Point) -> Self {
        Point::new(point.row as u32, point.column as u32)
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::RemoveSelections {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            code: Default::default(),
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            group_id: Default::default(),
            is_primary: Default::default(),
            is_valid: true,
        }
    }
}

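/// Groups consecutive row numbers from `values` into contiguous ranges, capping each
/// range at `max_len` rows.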
pub fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values.into_iter();
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}
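
// A minimal illustrative test for `contiguous_ranges`, added here as a usage sketch.
// It is an assumption that this file carries no test module of its own; the crate's
// existing tests may live elsewhere.
#[cfg(test)]
mod contiguous_ranges_tests {
    use super::contiguous_ranges;

    #[test]
    fn test_contiguous_ranges() {
        // Consecutive values are grouped together; a gap starts a new range.
        let values = [1, 2, 3, 5, 6, 9, 10, 11, 12].into_iter();
        let ranges: Vec<_> = contiguous_ranges(values, 100).collect();
        assert_eq!(ranges, &[1..4, 5..7, 9..13]);

        // `max_len` caps the length of each emitted range.
        let values = [1, 2, 3, 4, 5].into_iter();
        let ranges: Vec<_> = contiguous_ranges(values, 2).collect();
        assert_eq!(ranges, &[1..3, 3..5, 5..6]);
    }
}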