1use crate::diagnostic_set::DiagnosticEntry;
2pub use crate::{
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
6 PLAIN_TEXT,
7};
8use anyhow::{anyhow, Result};
9use clock::ReplicaId;
10use futures::FutureExt as _;
11use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
12use lazy_static::lazy_static;
13use lsp::LanguageServer;
14use parking_lot::Mutex;
15use postage::{prelude::Stream, sink::Sink, watch};
16use similar::{ChangeTag, TextDiff};
17use smol::future::yield_now;
18use std::{
19 any::Any,
20 cell::RefCell,
21 cmp,
22 collections::{BTreeMap, HashMap, HashSet},
23 ffi::OsString,
24 future::Future,
25 iter::{Iterator, Peekable},
26 mem,
27 ops::{Deref, DerefMut, Range},
28 path::{Path, PathBuf},
29 str,
30 sync::Arc,
31 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
32 vec,
33};
34use text::operation_queue::OperationQueue;
35pub use text::{Buffer as TextBuffer, Operation as _, *};
36use theme::SyntaxTheme;
37use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
38use util::{post_inc, TryFutureExt as _};
39
40#[cfg(any(test, feature = "test-support"))]
41pub use tree_sitter_rust;
42
43pub use lsp::DiagnosticSeverity;
44
thread_local! {
    // One reusable tree-sitter parser per thread, so parses don't pay for
    // re-creating parser state each time.
    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}

lazy_static! {
    // Pool of recycled tree-sitter query cursors; checked out and returned
    // via `QueryCursorHandle`.
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}

// TODO - Make this configurable
// Number of spaces added/removed per indentation level by autoindent.
const INDENT_SIZE: u32 = 4;
55
/// A text buffer augmented with file state, language/syntax information,
/// diagnostics, and an optional language-server connection.
pub struct Buffer {
    // The underlying CRDT text buffer.
    text: TextBuffer,
    // The file backing this buffer on disk, if any.
    file: Option<Box<dyn File>>,
    // Buffer version as of the last save (or load).
    saved_version: clock::Global,
    // File modification time as of the last save (or load).
    saved_mtime: SystemTime,
    language: Option<Arc<Language>>,
    // Outstanding autoindent work, consumed by `request_autoindent`.
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    pending_autoindent: Option<Task<()>>,
    // How long `reparse` blocks waiting for a parse before going async.
    sync_parse_timeout: Duration,
    syntax_tree: Mutex<Option<SyntaxTree>>,
    parsing_in_background: bool,
    // Incremented each time a parse completes; used for change detection.
    parse_count: usize,
    diagnostics: DiagnosticSet,
    // Incremented each time diagnostics are replaced.
    diagnostics_update_count: usize,
    language_server: Option<LanguageServerState>,
    deferred_ops: OperationQueue<Operation>,
    #[cfg(test)]
    pub(crate) operations: Vec<Operation>,
}
75
/// An immutable snapshot of a `Buffer`'s text, syntax tree, and diagnostics
/// at a point in time.
pub struct BufferSnapshot {
    text: text::BufferSnapshot,
    tree: Option<Tree>,
    diagnostics: DiagnosticSet,
    diagnostics_update_count: usize,
    // Whether a background parse was in flight when the snapshot was taken.
    is_parsing: bool,
    language: Option<Arc<Language>>,
    parse_count: usize,
}
85
/// A single diagnostic message attached to a range of the buffer.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Diagnostic {
    pub severity: DiagnosticSeverity,
    pub message: String,
    // Id shared by all diagnostics in the same related group.
    pub group_id: usize,
    // Whether this entry is the primary diagnostic of its group
    // (the one with the minimum severity value; see `update_diagnostics`).
    pub is_primary: bool,
}
93
/// State for the buffer's connection to a language server, including the
/// background task that forwards buffer snapshots to the server.
struct LanguageServerState {
    server: Arc<LanguageServer>,
    // Channel feeding the latest buffer snapshot to `_maintain_server`.
    latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
    // Snapshots keyed by version, retained until diagnostics for that
    // version arrive (pruned in `update_diagnostics`).
    pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
    next_version: usize,
    // Background task that sends didOpen/didChange notifications.
    _maintain_server: Task<Option<()>>,
}

/// A versioned snapshot of the buffer sent to the language server.
#[derive(Clone)]
struct LanguageServerSnapshot {
    buffer_snapshot: text::BufferSnapshot,
    version: usize,
    path: Arc<Path>,
}

/// Operations that can be applied to (or broadcast from) a buffer replica.
#[derive(Clone, Debug)]
pub enum Operation {
    // A plain text-CRDT operation.
    Buffer(text::Operation),
    // A full replacement of the buffer's diagnostic set.
    UpdateDiagnostics {
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
}
117
/// Events emitted by a `Buffer` via its model context.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Event {
    Edited,
    // The buffer diverged from its saved state.
    Dirtied,
    Saved,
    FileHandleChanged,
    // The buffer's contents were replaced from disk.
    Reloaded,
    Reparsed,
    DiagnosticsUpdated,
    Closed,
}
129
/// Abstraction over the file that backs a buffer, including persistence and
/// notifications back to the file's owning worktree.
pub trait File {
    /// Returns the id of the worktree this file belongs to.
    fn worktree_id(&self) -> usize;

    /// Returns the id of this file's entry within its worktree, if any.
    fn entry_id(&self) -> Option<usize>;

    /// Returns the last-known modification time of this file.
    fn mtime(&self) -> SystemTime;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<Path>;

    /// Returns the absolute path of this file.
    fn abs_path(&self) -> Option<PathBuf>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self) -> PathBuf;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name(&self) -> Option<OsString>;

    /// Returns whether the file has been deleted from disk.
    fn is_deleted(&self) -> bool;

    /// Persists the given text as this file's contents, resolving to the
    /// buffer version and file mtime at the time of the save.
    fn save(
        &self,
        buffer_id: u64,
        text: Rope,
        version: clock::Global,
        cx: &mut MutableAppContext,
    ) -> Task<Result<(clock::Global, SystemTime)>>;

    /// Loads the file's current contents from local disk, if available.
    fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;

    /// Called when the buffer with the given id applies the given operation.
    fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);

    /// Called when the buffer with the given id is closed.
    fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);

    /// Clones this file handle into a new boxed trait object.
    fn boxed_clone(&self) -> Box<dyn File>;

    /// Returns this file as `Any`, for downcasting to a concrete type.
    fn as_any(&self) -> &dyn Any;
}
171
// RAII handle to a pooled tree-sitter query cursor (pool: `QUERY_CURSORS`).
struct QueryCursorHandle(Option<QueryCursor>);

// A parsed syntax tree tagged with the buffer version it reflects.
#[derive(Clone)]
struct SyntaxTree {
    tree: Tree,
    version: clock::Global,
}

// A request to recompute indentation after an edit, capturing the buffer
// state before the edit and anchors for the edited/inserted regions.
#[derive(Clone)]
struct AutoindentRequest {
    // Selection sets to shift along with any inserted indentation.
    selection_set_ids: HashSet<SelectionSetId>,
    before_edit: BufferSnapshot,
    edited: Vec<Anchor>,
    inserted: Option<Vec<Range<Anchor>>>,
}

// A suggested indentation for a row: indent relative to `basis_row`,
// optionally one extra level deeper (`indent`).
#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    indent: bool,
}
193
// Adapter letting tree-sitter queries read text directly from a `Rope`.
struct TextProvider<'a>(&'a Rope);

// Iterator state for syntax-highlighting captures while chunking a buffer.
struct BufferChunkHighlights<'a> {
    captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
    next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
    // Stack of (end offset, highlight) for currently-open captures.
    stack: Vec<(usize, HighlightId)>,
    highlight_map: HighlightMap,
    theme: &'a SyntaxTheme,
    _query_cursor: QueryCursorHandle,
}

/// Iterator over a buffer range that yields text chunks annotated with
/// highlight styles and diagnostic severities.
pub struct BufferChunks<'a> {
    range: Range<usize>,
    chunks: rope::Chunks<'a>,
    diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
    // Nesting depth of currently-open diagnostics, per severity.
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    highlights: Option<BufferChunkHighlights<'a>>,
}

/// A contiguous run of text with uniform styling and diagnostic status.
#[derive(Clone, Copy, Debug, Default)]
pub struct Chunk<'a> {
    pub text: &'a str,
    pub highlight_style: Option<HighlightStyle>,
    pub diagnostic: Option<DiagnosticSeverity>,
}

// A line-based diff of the buffer against `new_text`, computed relative to
// `base_version` (see `Buffer::diff` / `Buffer::apply_diff`).
pub(crate) struct Diff {
    base_version: clock::Global,
    new_text: Arc<str>,
    // (change tag, byte length) per diffed line.
    changes: Vec<(ChangeTag, usize)>,
}

// A start or end boundary of a diagnostic's range, used when chunking.
#[derive(Clone, Copy)]
struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    severity: DiagnosticSeverity,
}
235
236impl Buffer {
237 pub fn new<T: Into<Arc<str>>>(
238 replica_id: ReplicaId,
239 base_text: T,
240 cx: &mut ModelContext<Self>,
241 ) -> Self {
242 Self::build(
243 TextBuffer::new(
244 replica_id,
245 cx.model_id() as u64,
246 History::new(base_text.into()),
247 ),
248 None,
249 )
250 }
251
252 pub fn from_file<T: Into<Arc<str>>>(
253 replica_id: ReplicaId,
254 base_text: T,
255 file: Box<dyn File>,
256 cx: &mut ModelContext<Self>,
257 ) -> Self {
258 Self::build(
259 TextBuffer::new(
260 replica_id,
261 cx.model_id() as u64,
262 History::new(base_text.into()),
263 ),
264 Some(file),
265 )
266 }
267
    /// Reconstructs a buffer from its serialized protobuf representation,
    /// replaying its edit history, selection sets, and diagnostics.
    ///
    /// Returns an error if the serialized operations cannot be applied.
    pub fn from_proto(
        replica_id: ReplicaId,
        message: proto::Buffer,
        file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Self> {
        let mut buffer =
            text::Buffer::new(replica_id, message.id, History::new(message.content.into()));
        // Replay the serialized edit history before restoring selections,
        // so selection anchors resolve against the final text.
        let ops = message
            .history
            .into_iter()
            .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op)));
        buffer.apply_ops(ops)?;
        for set in message.selections {
            let set = proto::deserialize_selection_set(set);
            buffer.add_raw_selection_set(set.id, set);
        }
        let mut this = Self::build(buffer, file);
        this.apply_diagnostic_update(
            Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
            cx,
        );

        Ok(this)
    }
293
294 pub fn to_proto(&self) -> proto::Buffer {
295 proto::Buffer {
296 id: self.remote_id(),
297 content: self.text.base_text().to_string(),
298 history: self
299 .text
300 .history()
301 .map(proto::serialize_edit_operation)
302 .collect(),
303 selections: self
304 .selection_sets()
305 .map(|(_, set)| proto::serialize_selection_set(set))
306 .collect(),
307 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
308 }
309 }
310
311 pub fn with_language(
312 mut self,
313 language: Option<Arc<Language>>,
314 language_server: Option<Arc<LanguageServer>>,
315 cx: &mut ModelContext<Self>,
316 ) -> Self {
317 self.set_language(language, language_server, cx);
318 self
319 }
320
321 fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
322 let saved_mtime;
323 if let Some(file) = file.as_ref() {
324 saved_mtime = file.mtime();
325 } else {
326 saved_mtime = UNIX_EPOCH;
327 }
328
329 Self {
330 saved_mtime,
331 saved_version: buffer.version(),
332 text: buffer,
333 file,
334 syntax_tree: Mutex::new(None),
335 parsing_in_background: false,
336 parse_count: 0,
337 sync_parse_timeout: Duration::from_millis(1),
338 autoindent_requests: Default::default(),
339 pending_autoindent: Default::default(),
340 language: None,
341 diagnostics: Default::default(),
342 diagnostics_update_count: 0,
343 language_server: None,
344 deferred_ops: OperationQueue::new(),
345 #[cfg(test)]
346 operations: Default::default(),
347 }
348 }
349
350 pub fn snapshot(&self) -> BufferSnapshot {
351 BufferSnapshot {
352 text: self.text.snapshot(),
353 tree: self.syntax_tree(),
354 diagnostics: self.diagnostics.clone(),
355 diagnostics_update_count: self.diagnostics_update_count,
356 is_parsing: self.parsing_in_background,
357 language: self.language.clone(),
358 parse_count: self.parse_count,
359 }
360 }
361
362 pub fn file(&self) -> Option<&dyn File> {
363 self.file.as_deref()
364 }
365
    /// Saves the buffer's contents via its backing file.
    ///
    /// Returns an error immediately if the buffer has no file. Otherwise
    /// returns a task that resolves to the buffer version and file mtime at
    /// the time of the save, after recording them through `did_save`.
    pub fn save(
        &mut self,
        cx: &mut ModelContext<Self>,
    ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
        let file = self
            .file
            .as_ref()
            .ok_or_else(|| anyhow!("buffer has no file"))?;
        let text = self.as_rope().clone();
        let version = self.version();
        let save = file.save(self.remote_id(), text, version, cx.as_mut());
        Ok(cx.spawn(|this, mut cx| async move {
            let (version, mtime) = save.await?;
            this.update(&mut cx, |this, cx| {
                this.did_save(version.clone(), mtime, None, cx);
            });
            Ok((version, mtime))
        }))
    }
385
    /// Assigns the buffer's language and, optionally, connects it to a
    /// language server.
    ///
    /// When a server is provided, spawns a background task that watches for
    /// new buffer snapshots and forwards them to the server: the first
    /// snapshot as a `didOpen` notification, subsequent ones as incremental
    /// `didChange` notifications. Finally triggers a reparse and pushes the
    /// current snapshot to the server.
    pub fn set_language(
        &mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<lsp::LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.language = language;
        self.language_server = if let Some(server) = language_server {
            let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
            Some(LanguageServerState {
                latest_snapshot: latest_snapshot_tx,
                pending_snapshots: Default::default(),
                next_version: 0,
                server: server.clone(),
                _maintain_server: cx.background().spawn(
                    async move {
                        // `prev_snapshot` is None until the first snapshot is
                        // sent; it distinguishes didOpen from didChange.
                        let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
                        while let Some(snapshot) = latest_snapshot_rx.recv().await {
                            if let Some(snapshot) = snapshot {
                                let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
                                if let Some(prev_snapshot) = prev_snapshot {
                                    let changes = lsp::DidChangeTextDocumentParams {
                                        text_document: lsp::VersionedTextDocumentIdentifier::new(
                                            uri,
                                            snapshot.version as i32,
                                        ),
                                        // Translate each edit since the previous
                                        // snapshot into an LSP content change.
                                        // Ranges are expressed in the *old* text's
                                        // coordinates, offset by preceding edits
                                        // (hence old lengths applied at new starts).
                                        content_changes: snapshot
                                            .buffer_snapshot
                                            .edits_since::<(PointUtf16, usize)>(
                                                prev_snapshot.buffer_snapshot.version(),
                                            )
                                            .map(|edit| {
                                                let edit_start = edit.new.start.0;
                                                let edit_end = edit_start
                                                    + (edit.old.end.0 - edit.old.start.0);
                                                let new_text = snapshot
                                                    .buffer_snapshot
                                                    .text_for_range(
                                                        edit.new.start.1..edit.new.end.1,
                                                    )
                                                    .collect();
                                                lsp::TextDocumentContentChangeEvent {
                                                    range: Some(lsp::Range::new(
                                                        lsp::Position::new(
                                                            edit_start.row,
                                                            edit_start.column,
                                                        ),
                                                        lsp::Position::new(
                                                            edit_end.row,
                                                            edit_end.column,
                                                        ),
                                                    )),
                                                    range_length: None,
                                                    text: new_text,
                                                }
                                            })
                                            .collect(),
                                    };
                                    server
                                        .notify::<lsp::notification::DidChangeTextDocument>(changes)
                                        .await?;
                                } else {
                                    // First snapshot: open the document with its
                                    // full text.
                                    server
                                        .notify::<lsp::notification::DidOpenTextDocument>(
                                            lsp::DidOpenTextDocumentParams {
                                                text_document: lsp::TextDocumentItem::new(
                                                    uri,
                                                    Default::default(),
                                                    snapshot.version as i32,
                                                    snapshot.buffer_snapshot.text().to_string(),
                                                ),
                                            },
                                        )
                                        .await?;
                                }

                                prev_snapshot = Some(snapshot);
                            }
                        }
                        Ok(())
                    }
                    .log_err(),
                ),
            })
        } else {
            None
        };

        self.reparse(cx);
        self.update_language_server();
    }
477
    /// Records a completed save: updates the saved version/mtime, optionally
    /// replaces the file handle, notifies the language server with a
    /// `didSave`, and emits [`Event::Saved`].
    ///
    /// NOTE(review): the `didSave` path unwraps both the file handle and its
    /// absolute path — presumably a buffer with a language server always has
    /// a file with an absolute path; confirm against callers.
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: SystemTime,
        new_file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.saved_mtime = mtime;
        self.saved_version = version;
        if let Some(new_file) = new_file {
            self.file = Some(new_file);
        }
        if let Some(state) = &self.language_server {
            cx.background()
                .spawn(
                    state
                        .server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: lsp::TextDocumentIdentifier {
                                    uri: lsp::Url::from_file_path(
                                        self.file.as_ref().unwrap().abs_path().unwrap(),
                                    )
                                    .unwrap(),
                                },
                                text: None,
                            },
                        ),
                )
                .detach()
        }
        cx.emit(Event::Saved);
    }
511
    /// Reconciles the buffer with an updated file handle from the worktree.
    ///
    /// Emits [`Event::Dirtied`] if the file was newly deleted, and
    /// [`Event::FileHandleChanged`] if the path, deletion state, or mtime
    /// changed. If the file changed on disk and the buffer has no unsaved
    /// edits, returns a task that reloads the contents from disk, applies
    /// them as a diff, and emits [`Event::Reloaded`]. Returns `None` if the
    /// buffer had no previous file or no reload was needed.
    pub fn file_updated(
        &mut self,
        new_file: Box<dyn File>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<()>> {
        let old_file = self.file.as_ref()?;
        let mut file_changed = false;
        let mut task = None;

        if new_file.path() != old_file.path() {
            file_changed = true;
        }

        if new_file.is_deleted() {
            if !old_file.is_deleted() {
                file_changed = true;
                // A deleted file means the buffer's contents no longer exist
                // on disk, so the buffer becomes dirty.
                if !self.is_dirty() {
                    cx.emit(Event::Dirtied);
                }
            }
        } else {
            let new_mtime = new_file.mtime();
            if new_mtime != old_file.mtime() {
                file_changed = true;

                // Only auto-reload when there are no unsaved edits to lose.
                if !self.is_dirty() {
                    task = Some(cx.spawn(|this, mut cx| {
                        async move {
                            let new_text = this.read_with(&cx, |this, cx| {
                                this.file.as_ref().and_then(|file| file.load_local(cx))
                            });
                            if let Some(new_text) = new_text {
                                let new_text = new_text.await?;
                                let diff = this
                                    .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
                                    .await;
                                this.update(&mut cx, |this, cx| {
                                    // `apply_diff` is a no-op if the buffer was
                                    // edited while the diff was being computed.
                                    if this.apply_diff(diff, cx) {
                                        this.saved_version = this.version();
                                        this.saved_mtime = new_mtime;
                                        cx.emit(Event::Reloaded);
                                    }
                                });
                            }
                            Ok(())
                        }
                        .log_err()
                        .map(drop)
                    }));
                }
            }
        }

        if file_changed {
            cx.emit(Event::FileHandleChanged);
        }
        self.file = Some(new_file);
        task
    }
571
    /// Emits [`Event::Closed`] to notify observers that the buffer is closing.
    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }

    /// Returns the buffer's language, if one has been assigned.
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    /// Returns the number of completed parses, for change detection.
    pub fn parse_count(&self) -> usize {
        self.parse_count
    }
583
584 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
585 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
586 self.interpolate_tree(syntax_tree);
587 Some(syntax_tree.tree.clone())
588 } else {
589 None
590 }
591 }
592
    /// Returns whether a background parse is currently in flight (test-only).
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.parsing_in_background
    }

    /// Overrides how long `reparse` blocks before falling back to a
    /// background parse (test-only).
    #[cfg(test)]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }
602
    /// Reparses the buffer with its current grammar.
    ///
    /// Tries to finish synchronously within `sync_parse_timeout`; if the
    /// parse takes longer, it continues in the background and this returns
    /// `false`. Returns `true` only when a parse completed synchronously.
    /// At most one background parse runs at a time; a reparse requested
    /// while one is in flight is picked up by the `parse_again` check below.
    fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
        if self.parsing_in_background {
            return false;
        }

        if let Some(grammar) = self.grammar().cloned() {
            let old_tree = self.syntax_tree();
            let text = self.as_rope().clone();
            let parsed_version = self.version();
            let parse_task = cx.background().spawn({
                let grammar = grammar.clone();
                async move { Self::parse_text(&text, old_tree, &grammar) }
            });

            match cx
                .background()
                .block_with_timeout(self.sync_parse_timeout, parse_task)
            {
                Ok(new_tree) => {
                    self.did_finish_parsing(new_tree, parsed_version, cx);
                    return true;
                }
                Err(parse_task) => {
                    self.parsing_in_background = true;
                    cx.spawn(move |this, mut cx| async move {
                        let new_tree = parse_task.await;
                        this.update(&mut cx, move |this, cx| {
                            // The result may be stale: the buffer may have been
                            // edited, or the grammar swapped, during the parse.
                            let grammar_changed = this
                                .grammar()
                                .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
                            let parse_again = this.version.gt(&parsed_version) || grammar_changed;
                            this.parsing_in_background = false;
                            this.did_finish_parsing(new_tree, parsed_version, cx);

                            if parse_again && this.reparse(cx) {
                                return;
                            }
                        });
                    })
                    .detach();
                }
            }
        }
        false
    }
648
649 fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
650 PARSER.with(|parser| {
651 let mut parser = parser.borrow_mut();
652 parser
653 .set_language(grammar.ts_language)
654 .expect("incompatible grammar");
655 let mut chunks = text.chunks_in_range(0..text.len());
656 let tree = parser
657 .parse_with(
658 &mut move |offset, _| {
659 chunks.seek(offset);
660 chunks.next().unwrap_or("").as_bytes()
661 },
662 old_tree.as_ref(),
663 )
664 .unwrap();
665 tree
666 })
667 }
668
    /// Adjusts the syntax tree's node positions for all edits made since the
    /// tree's recorded version, then advances that version to the current one.
    /// This keeps node ranges usable between parses without reparsing.
    fn interpolate_tree(&self, tree: &mut SyntaxTree) {
        for edit in self.edits_since::<(usize, Point)>(&tree.version) {
            let (bytes, lines) = edit.flatten();
            // Old-range endpoints are re-expressed relative to the new start
            // coordinates, as tree-sitter's InputEdit expects.
            tree.tree.edit(&InputEdit {
                start_byte: bytes.new.start,
                old_end_byte: bytes.new.start + bytes.old.len(),
                new_end_byte: bytes.new.end,
                start_position: lines.new.start.to_ts_point(),
                old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
                    .to_ts_point(),
                new_end_position: lines.new.end.to_ts_point(),
            });
        }
        tree.version = self.version();
    }
684
    /// Installs a freshly parsed tree (tagged with the version it reflects),
    /// kicks off any pending autoindent work, and emits [`Event::Reparsed`].
    fn did_finish_parsing(
        &mut self,
        tree: Tree,
        version: clock::Global,
        cx: &mut ModelContext<Self>,
    ) {
        self.parse_count += 1;
        *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
        // Autoindent suggestions depend on the syntax tree, so they are
        // computed after each parse.
        self.request_autoindent(cx);
        cx.emit(Event::Reparsed);
        cx.notify();
    }
697
    /// Replaces the buffer's diagnostics with a new set from the language
    /// server, returning the operation to broadcast to other replicas.
    ///
    /// If `version` is given, the diagnostics are interpreted against the
    /// pending snapshot for that version (older pending snapshots are then
    /// pruned); otherwise against the current buffer. Diagnostics from
    /// disk-based sources are additionally translated through edits made
    /// since the last save, since those tools saw the on-disk contents.
    /// Diagnostics are clustered into groups by (source, code, range), and
    /// the lowest-severity-value entry of each group is marked primary.
    ///
    /// Returns an error if the snapshot for `version` is missing.
    pub fn update_diagnostics(
        &mut self,
        version: Option<i32>,
        mut diagnostics: Vec<lsp::Diagnostic>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Operation> {
        diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));

        let version = version.map(|version| version as usize);
        let content = if let Some(version) = version {
            let language_server = self.language_server.as_mut().unwrap();
            let snapshot = language_server
                .pending_snapshots
                .get(&version)
                .ok_or_else(|| anyhow!("missing snapshot"))?;
            &snapshot.buffer_snapshot
        } else {
            self.deref()
        };
        let abs_path = self.file.as_ref().and_then(|f| f.abs_path());

        let empty_set = HashSet::new();
        let disk_based_sources = self
            .language
            .as_ref()
            .and_then(|language| language.disk_based_diagnostic_sources())
            .unwrap_or(&empty_set);

        let mut edits_since_save = content
            .edits_since::<PointUtf16>(&self.saved_version)
            .peekable();
        // Running totals of consumed edits, used to shift disk-based
        // diagnostic positions from saved-text to current-text coordinates.
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        let mut group_ids_by_diagnostic_range = HashMap::new();
        let mut diagnostics_by_group_id = HashMap::new();
        let mut next_group_id = 0;
        'outer: for diagnostic in &diagnostics {
            let mut start = diagnostic.range.start.to_point_utf16();
            let mut end = diagnostic.range.end.to_point_utf16();
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref();
            // Reuse the group of any related diagnostic sharing a
            // (source, code, range) key; otherwise allocate a new group and
            // register all of this diagnostic's ranges under it.
            let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
                .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
                .copied()
                .unwrap_or_else(|| {
                    let group_id = post_inc(&mut next_group_id);
                    for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
                        group_ids_by_diagnostic_range.insert((source, code, range), group_id);
                    }
                    group_id
                });

            if diagnostic
                .source
                .as_ref()
                .map_or(false, |source| disk_based_sources.contains(source))
            {
                // Walk edits up to this diagnostic (input is sorted by range,
                // so the edit iterator only ever needs to advance). A
                // diagnostic overlapping an edit is dropped entirely.
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                start = last_edit_new_end + (start - last_edit_old_end);
                end = last_edit_new_end + (end - last_edit_old_end);
            }

            let mut range = content.clip_point_utf16(start, Bias::Left)
                ..content.clip_point_utf16(end, Bias::Right);
            // Widen empty ranges by one column (forward if possible,
            // otherwise backward) so the diagnostic remains visible.
            if range.start == range.end {
                range.end.column += 1;
                range.end = content.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = content.clip_point_utf16(range.start, Bias::Left);
                }
            }

            diagnostics_by_group_id
                .entry(group_id)
                .or_insert(Vec::new())
                .push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: false,
                    },
                });
        }

        // `edits_since_save` borrows `content`, which may alias `self`.
        drop(edits_since_save);
        let mut diagnostics = mem::take(&mut self.diagnostics);
        diagnostics.reset(
            diagnostics_by_group_id
                .into_values()
                .flat_map(|mut diagnostics| {
                    // Mark the most severe entry of each group as primary.
                    let primary = diagnostics
                        .iter_mut()
                        .min_by_key(|entry| entry.diagnostic.severity)
                        .unwrap();
                    primary.diagnostic.is_primary = true;
                    diagnostics
                }),
            self,
        );
        self.diagnostics = diagnostics;

        // Snapshots older than this version can no longer receive
        // diagnostics, so drop them.
        if let Some(version) = version {
            let language_server = self.language_server.as_mut().unwrap();
            let versions_to_delete = language_server
                .pending_snapshots
                .range(..version)
                .map(|(v, _)| *v)
                .collect::<Vec<_>>();
            for version in versions_to_delete {
                language_server.pending_snapshots.remove(&version);
            }
        }

        self.diagnostics_update_count += 1;
        cx.notify();
        cx.emit(Event::DiagnosticsUpdated);
        Ok(Operation::UpdateDiagnostics {
            diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
            lamport_timestamp: self.lamport_timestamp(),
        })
    }
833
    /// Returns the diagnostics whose ranges intersect `search_range`, with
    /// positions converted to type `O`.
    pub fn diagnostics_in_range<'a, T, O>(
        &'a self,
        search_range: Range<T>,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        T: 'a + ToOffset,
        O: 'a + FromAnchor,
    {
        self.diagnostics.range(search_range, self, true)
    }

    /// Returns all diagnostics belonging to the given group, with positions
    /// converted to type `O`.
    pub fn diagnostic_group<'a, O>(
        &'a self,
        group_id: usize,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics.group(group_id, self)
    }

    /// Returns how many times the diagnostics have been replaced, for change
    /// detection.
    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }
858
    /// Computes and applies indentation for any pending autoindent requests.
    ///
    /// Attempts to finish synchronously within a short timeout; otherwise the
    /// computation continues in the background and its result is applied when
    /// ready (stored in `pending_autoindent`).
    fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(indent_columns) = self.compute_autoindents() {
            let indent_columns = cx.background().spawn(indent_columns);
            match cx
                .background()
                .block_with_timeout(Duration::from_micros(500), indent_columns)
            {
                Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
                Err(indent_columns) => {
                    self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
                        let indent_columns = indent_columns.await;
                        this.update(&mut cx, |this, cx| {
                            this.apply_autoindents(indent_columns, cx);
                        });
                    }));
                }
            }
        }
    }
878
    /// Builds a future that computes the target indentation column for each
    /// row affected by the pending autoindent requests.
    ///
    /// Returns `None` if there is no language, no syntax tree, or no pending
    /// requests. The future yields a map from (post-edit) row to indent
    /// column, containing only rows whose indentation should change. It
    /// yields to the executor periodically (`max_rows_between_yields`) so
    /// large computations don't hog a background thread.
    fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.language.is_none()
            || snapshot.tree.is_none()
            || self.autoindent_requests.is_empty()
        {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_columns = BTreeMap::new();
            for request in autoindent_requests {
                // Resolve each edited anchor in both the pre-edit and
                // post-edit snapshots to map old rows to new rows.
                let old_to_new_rows = request
                    .edited
                    .iter()
                    .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
                    .zip(
                        request
                            .edited
                            .iter()
                            .map(|anchor| anchor.summary::<Point>(&snapshot).row),
                    )
                    .collect::<BTreeMap<u32, u32>>();

                // First pass: what indentation would each edited line have
                // had *before* the edit, per the old syntax tree?
                let mut old_suggestions = HashMap::<u32, u32>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        let indentation_basis = old_to_new_rows
                            .get(&suggestion.basis_row)
                            .and_then(|from_row| old_suggestions.get(from_row).copied())
                            .unwrap_or_else(|| {
                                request
                                    .before_edit
                                    .indent_column_for_line(suggestion.basis_row)
                            });
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        old_suggestions.insert(
                            *old_to_new_rows.get(&old_row).unwrap(),
                            indentation_basis + delta,
                        );
                    }
                    yield_now().await;
                }

                // At this point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the
                // buffer before the edit, but keyed by the row for these lines after the edits were applied.

                // Second pass: compute suggestions against the new syntax
                // tree, and record only the rows whose indentation differs
                // from the pre-edit suggestion.
                let new_edited_row_ranges =
                    contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
                for new_edited_row_range in new_edited_row_ranges {
                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        let new_indentation = indent_columns
                            .get(&suggestion.basis_row)
                            .copied()
                            .unwrap_or_else(|| {
                                snapshot.indent_column_for_line(suggestion.basis_row)
                            })
                            + delta;
                        if old_suggestions
                            .get(&new_row)
                            .map_or(true, |old_indentation| new_indentation != *old_indentation)
                        {
                            indent_columns.insert(new_row, new_indentation);
                        }
                    }
                    yield_now().await;
                }

                // Newly inserted rows are always indented, regardless of any
                // pre-edit suggestion.
                if let Some(inserted) = request.inserted.as_ref() {
                    let inserted_row_ranges = contiguous_ranges(
                        inserted
                            .iter()
                            .map(|range| range.to_point(&snapshot))
                            .flat_map(|range| range.start.row..range.end.row + 1),
                        max_rows_between_yields,
                    );
                    for inserted_row_range in inserted_row_ranges {
                        let suggestions = snapshot
                            .suggest_autoindents(inserted_row_range.clone())
                            .into_iter()
                            .flatten();
                        for (row, suggestion) in inserted_row_range.zip(suggestions) {
                            let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                            let new_indentation = indent_columns
                                .get(&suggestion.basis_row)
                                .copied()
                                .unwrap_or_else(|| {
                                    snapshot.indent_column_for_line(suggestion.basis_row)
                                })
                                + delta;
                            indent_columns.insert(row, new_indentation);
                        }
                        yield_now().await;
                    }
                }
            }
            indent_columns
        })
    }
991
    /// Applies the computed indentation columns inside a single transaction,
    /// consuming all pending autoindent requests.
    ///
    /// Selections that sat at column 0 of a reindented row are shifted right
    /// by the inserted indentation so they stay at the line's start.
    fn apply_autoindents(
        &mut self,
        indent_columns: BTreeMap<u32, u32>,
        cx: &mut ModelContext<Self>,
    ) {
        // Drain the requests and collect every selection set they touched.
        let selection_set_ids = self
            .autoindent_requests
            .drain(..)
            .flat_map(|req| req.selection_set_ids.clone())
            .collect::<HashSet<_>>();

        self.start_transaction(selection_set_ids.iter().copied())
            .unwrap();
        for (row, indent_column) in &indent_columns {
            self.set_indent_column_for_line(*row, *indent_column, cx);
        }

        for selection_set_id in &selection_set_ids {
            if let Ok(set) = self.selection_set(*selection_set_id) {
                let new_selections = set
                    .selections::<Point>(&*self)
                    .map(|selection| {
                        if selection.start.column == 0 {
                            let delta = Point::new(
                                0,
                                indent_columns
                                    .get(&selection.start.row)
                                    .copied()
                                    .unwrap_or(0),
                            );
                            if delta.column > 0 {
                                return Selection {
                                    id: selection.id,
                                    goal: selection.goal,
                                    reversed: selection.reversed,
                                    start: selection.start + delta,
                                    end: selection.end + delta,
                                };
                            }
                        }
                        selection
                    })
                    .collect::<Vec<_>>();
                self.update_selection_set(*selection_set_id, &new_selections, cx)
                    .unwrap();
            }
        }

        self.end_transaction(selection_set_ids.iter().copied(), cx)
            .unwrap();
    }
1043
1044 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
1045 let current_column = self.indent_column_for_line(row);
1046 if column > current_column {
1047 let offset = Point::new(row, 0).to_offset(&*self);
1048 self.edit(
1049 [offset..offset],
1050 " ".repeat((column - current_column) as usize),
1051 cx,
1052 );
1053 } else if column < current_column {
1054 self.edit(
1055 [Point::new(row, 0)..Point::new(row, current_column - column)],
1056 "",
1057 cx,
1058 );
1059 }
1060 }
1061
1062 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
1063 if let Some(tree) = self.syntax_tree() {
1064 let root = tree.root_node();
1065 let range = range.start.to_offset(self)..range.end.to_offset(self);
1066 let mut node = root.descendant_for_byte_range(range.start, range.end);
1067 while node.map_or(false, |n| n.byte_range() == range) {
1068 node = node.unwrap().parent();
1069 }
1070 node.map(|n| n.byte_range())
1071 } else {
1072 None
1073 }
1074 }
1075
    /// Returns the byte ranges of the innermost pair of brackets that
    /// encloses `range`, per the language's brackets query.
    ///
    /// Returns `None` if there is no grammar/syntax tree, the brackets query
    /// lacks `open`/`close` captures, or no enclosing pair exists.
    pub fn enclosing_bracket_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<usize>, Range<usize>)> {
        let (grammar, tree) = self.grammar().zip(self.syntax_tree())?;
        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;

        // Find bracket pairs that *inclusively* contain the given range.
        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
        let mut cursor = QueryCursorHandle::new();
        let matches = cursor.set_byte_range(range).matches(
            &grammar.brackets_query,
            tree.root_node(),
            TextProvider(self.as_rope()),
        );

        // Get the ranges of the innermost pair of brackets.
        matches
            .filter_map(|mat| {
                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
                Some((open.byte_range(), close.byte_range()))
            })
            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
    }
1102
1103 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
1104 // TODO: it would be nice to not allocate here.
1105 let old_text = self.text();
1106 let base_version = self.version();
1107 cx.background().spawn(async move {
1108 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
1109 .iter_all_changes()
1110 .map(|c| (c.tag(), c.value().len()))
1111 .collect::<Vec<_>>();
1112 Diff {
1113 base_version,
1114 new_text,
1115 changes,
1116 }
1117 })
1118 }
1119
    /// Applies a previously computed diff as a series of edits in one
    /// transaction.
    ///
    /// Returns `false` without editing if the buffer has changed since the
    /// diff's `base_version` (the diff would no longer line up).
    pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
        if self.version == diff.base_version {
            self.start_transaction(None).unwrap();
            // `offset` tracks the current position in the evolving buffer;
            // deletions intentionally do not advance it.
            let mut offset = 0;
            for (tag, len) in diff.changes {
                let range = offset..(offset + len);
                match tag {
                    ChangeTag::Equal => offset += len,
                    ChangeTag::Delete => self.edit(Some(range), "", cx),
                    ChangeTag::Insert => {
                        self.edit(Some(offset..offset), &diff.new_text[range], cx);
                        offset += len;
                    }
                }
            }
            self.end_transaction(None, cx).unwrap();
            true
        } else {
            false
        }
    }
1141
1142 pub fn is_dirty(&self) -> bool {
1143 !self.saved_version.ge(&self.version)
1144 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1145 }
1146
1147 pub fn has_conflict(&self) -> bool {
1148 !self.saved_version.ge(&self.version)
1149 && self
1150 .file
1151 .as_ref()
1152 .map_or(false, |file| file.mtime() > self.saved_mtime)
1153 }
1154
    /// Subscribes to edits on the underlying text buffer.
    pub fn subscribe(&mut self) -> Subscription {
        self.text.subscribe()
    }

    /// Begins a transaction (grouping subsequent edits for undo) timestamped
    /// with the current instant. Delegates to [`Self::start_transaction_at`].
    pub fn start_transaction(
        &mut self,
        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
    ) -> Result<()> {
        self.start_transaction_at(selection_set_ids, Instant::now())
    }

    /// Begins a transaction at an explicit time, recording the given
    /// selection sets so they can be restored on undo.
    pub(crate) fn start_transaction_at(
        &mut self,
        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
        now: Instant,
    ) -> Result<()> {
        self.text.start_transaction_at(selection_set_ids, now)
    }

    /// Ends the current transaction, timestamped with the current instant.
    /// Delegates to [`Self::end_transaction_at`].
    pub fn end_transaction(
        &mut self,
        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.end_transaction_at(selection_set_ids, Instant::now(), cx)
    }
1181
1182 pub(crate) fn end_transaction_at(
1183 &mut self,
1184 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1185 now: Instant,
1186 cx: &mut ModelContext<Self>,
1187 ) -> Result<()> {
1188 if let Some(start_version) = self.text.end_transaction_at(selection_set_ids, now) {
1189 let was_dirty = start_version != self.saved_version;
1190 self.did_edit(&start_version, was_dirty, cx);
1191 }
1192 Ok(())
1193 }
1194
1195 fn update_language_server(&mut self) {
1196 let language_server = if let Some(language_server) = self.language_server.as_mut() {
1197 language_server
1198 } else {
1199 return;
1200 };
1201 let abs_path = self
1202 .file
1203 .as_ref()
1204 .map_or(Path::new("/").to_path_buf(), |file| {
1205 file.abs_path().unwrap()
1206 });
1207
1208 let version = post_inc(&mut language_server.next_version);
1209 let snapshot = LanguageServerSnapshot {
1210 buffer_snapshot: self.text.snapshot(),
1211 version,
1212 path: Arc::from(abs_path),
1213 };
1214 language_server
1215 .pending_snapshots
1216 .insert(version, snapshot.clone());
1217 let _ = language_server
1218 .latest_snapshot
1219 .blocking_send(Some(snapshot));
1220 }
1221
    /// Replaces each of the given ranges with `new_text`, without triggering
    /// automatic indentation. See [`Self::edit_internal`] for details.
    pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
    where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, false, cx)
    }

    /// Replaces each of the given ranges with `new_text` and schedules an
    /// autoindent pass for the affected rows (when a language is set).
    pub fn edit_with_autoindent<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, true, cx)
    }
1243
    /// Core implementation of [`Self::edit`] and
    /// [`Self::edit_with_autoindent`]: replaces every range in `ranges_iter`
    /// with `new_text` inside a single transaction, optionally recording an
    /// autoindent request, and broadcasts the resulting operation.
    ///
    /// NOTE(review): the coalescing below assumes the incoming ranges are in
    /// ascending order — verify against callers.
    pub fn edit_internal<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        autoindent: bool,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        let new_text = new_text.into();

        // Skip invalid ranges and coalesce contiguous ones.
        let mut ranges: Vec<Range<usize>> = Vec::new();
        for range in ranges_iter {
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            // An empty range with empty replacement text is a no-op; drop it.
            if !new_text.is_empty() || !range.is_empty() {
                if let Some(prev_range) = ranges.last_mut() {
                    if prev_range.end >= range.start {
                        prev_range.end = cmp::max(prev_range.end, range.end);
                    } else {
                        ranges.push(range);
                    }
                } else {
                    ranges.push(range);
                }
            }
        }
        if ranges.is_empty() {
            return;
        }

        self.start_transaction(None).unwrap();
        // Any previously scheduled autoindent is now stale.
        self.pending_autoindent.take();
        let autoindent_request = if autoindent && self.language.is_some() {
            let before_edit = self.snapshot();
            let edited = ranges
                .iter()
                .filter_map(|range| {
                    let start = range.start.to_point(self);
                    // A newline inserted at the end of a line doesn't require
                    // reindenting the edited row itself; the newly inserted
                    // rows are covered by `inserted` below.
                    if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
                        None
                    } else {
                        Some(self.anchor_before(range.start))
                    }
                })
                .collect();
            Some((before_edit, edited))
        } else {
            None
        };

        // Capture these before `new_text` is moved into the edit below.
        let first_newline_ix = new_text.find('\n');
        let new_text_len = new_text.len();

        let edit = self.text.edit(ranges.iter().cloned(), new_text);

        if let Some((before_edit, edited)) = autoindent_request {
            let mut inserted = None;
            if let Some(first_newline_ix) = first_newline_ix {
                // For each edited range, compute the post-edit span of the
                // inserted text that follows its first newline. `delta`
                // accumulates how much earlier edits shifted later offsets.
                //
                // NOTE(review): `delta` accumulates (old_len + new_len); the
                // offset shift for later edits should normally be
                // new_len - old_len. The two agree only for pure insertions
                // (empty ranges) — confirm whether non-empty replacement
                // ranges can reach this path.
                let mut delta = 0isize;
                inserted = Some(
                    ranges
                        .iter()
                        .map(|range| {
                            let start =
                                (delta + range.start as isize) as usize + first_newline_ix + 1;
                            let end = (delta + range.start as isize) as usize + new_text_len;
                            delta +=
                                (range.end as isize - range.start as isize) + new_text_len as isize;
                            self.anchor_before(start)..self.anchor_after(end)
                        })
                        .collect(),
                );
            }

            // Record which selection sets existed when the transaction began,
            // so autoindent can adjust them.
            let selection_set_ids = self
                .text
                .peek_undo_stack()
                .unwrap()
                .starting_selection_set_ids()
                .collect();
            self.autoindent_requests.push(Arc::new(AutoindentRequest {
                selection_set_ids,
                before_edit,
                edited,
                inserted,
            }));
        }

        self.end_transaction(None, cx).unwrap();
        self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
    }
1338
1339 fn did_edit(
1340 &mut self,
1341 old_version: &clock::Global,
1342 was_dirty: bool,
1343 cx: &mut ModelContext<Self>,
1344 ) {
1345 if self.edits_since::<usize>(old_version).next().is_none() {
1346 return;
1347 }
1348
1349 self.reparse(cx);
1350 self.update_language_server();
1351
1352 cx.emit(Event::Edited);
1353 if !was_dirty {
1354 cx.emit(Event::Dirtied);
1355 }
1356 cx.notify();
1357 }
1358
    /// The tree-sitter grammar of the buffer's language, if any.
    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language.as_ref().and_then(|l| l.grammar.as_ref())
    }
1362
1363 pub fn add_selection_set<T: ToOffset>(
1364 &mut self,
1365 selections: &[Selection<T>],
1366 cx: &mut ModelContext<Self>,
1367 ) -> SelectionSetId {
1368 let operation = self.text.add_selection_set(selections);
1369 if let text::Operation::UpdateSelections { set_id, .. } = &operation {
1370 let set_id = *set_id;
1371 cx.notify();
1372 self.send_operation(Operation::Buffer(operation), cx);
1373 set_id
1374 } else {
1375 unreachable!()
1376 }
1377 }
1378
    /// Replaces the selections in an existing set and broadcasts the change.
    /// Fails if `set_id` is unknown.
    pub fn update_selection_set<T: ToOffset>(
        &mut self,
        set_id: SelectionSetId,
        selections: &[Selection<T>],
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let operation = self.text.update_selection_set(set_id, selections)?;
        cx.notify();
        self.send_operation(Operation::Buffer(operation), cx);
        Ok(())
    }

    /// Marks a selection set (or none) as the active one and broadcasts the
    /// change. Fails if `set_id` is unknown.
    pub fn set_active_selection_set(
        &mut self,
        set_id: Option<SelectionSetId>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let operation = self.text.set_active_selection_set(set_id)?;
        self.send_operation(Operation::Buffer(operation), cx);
        Ok(())
    }

    /// Removes a selection set and broadcasts the change. Fails if `set_id`
    /// is unknown.
    pub fn remove_selection_set(
        &mut self,
        set_id: SelectionSetId,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let operation = self.text.remove_selection_set(set_id)?;
        cx.notify();
        self.send_operation(Operation::Buffer(operation), cx);
        Ok(())
    }
1411
1412 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1413 &mut self,
1414 ops: I,
1415 cx: &mut ModelContext<Self>,
1416 ) -> Result<()> {
1417 self.pending_autoindent.take();
1418 let was_dirty = self.is_dirty();
1419 let old_version = self.version.clone();
1420 let mut deferred_ops = Vec::new();
1421 let buffer_ops = ops
1422 .into_iter()
1423 .filter_map(|op| match op {
1424 Operation::Buffer(op) => Some(op),
1425 _ => {
1426 if self.can_apply_op(&op) {
1427 self.apply_op(op, cx);
1428 } else {
1429 deferred_ops.push(op);
1430 }
1431 None
1432 }
1433 })
1434 .collect::<Vec<_>>();
1435 self.text.apply_ops(buffer_ops)?;
1436 self.flush_deferred_ops(cx);
1437 self.did_edit(&old_version, was_dirty, cx);
1438 // Notify independently of whether the buffer was edited as the operations could include a
1439 // selection update.
1440 cx.notify();
1441 Ok(())
1442 }
1443
1444 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1445 let mut deferred_ops = Vec::new();
1446 for op in self.deferred_ops.drain().iter().cloned() {
1447 if self.can_apply_op(&op) {
1448 self.apply_op(op, cx);
1449 } else {
1450 deferred_ops.push(op);
1451 }
1452 }
1453 self.deferred_ops.insert(deferred_ops);
1454 }
1455
1456 fn can_apply_op(&self, operation: &Operation) -> bool {
1457 match operation {
1458 Operation::Buffer(_) => {
1459 unreachable!("buffer operations should never be applied at this layer")
1460 }
1461 Operation::UpdateDiagnostics { diagnostics, .. } => {
1462 diagnostics.iter().all(|diagnostic| {
1463 self.text.can_resolve(&diagnostic.range.start)
1464 && self.text.can_resolve(&diagnostic.range.end)
1465 })
1466 }
1467 }
1468 }
1469
    /// Applies a single non-text operation. The caller must have verified it
    /// with [`Self::can_apply_op`] first.
    fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics { diagnostics, .. } => {
                self.apply_diagnostic_update(diagnostics, cx);
            }
        }
    }

    /// Replaces the buffer's diagnostic set with `diagnostics` (which must
    /// already be sorted) and bumps the update counter observers key off of.
    fn apply_diagnostic_update(
        &mut self,
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        cx: &mut ModelContext<Self>,
    ) {
        self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
        self.diagnostics_update_count += 1;
        cx.notify();
    }
1490
    /// Broadcasts an operation to collaborators via the backing file's
    /// worktree, if any.
    #[cfg(not(test))]
    pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
        if let Some(file) = &self.file {
            file.buffer_updated(self.remote_id(), operation, cx.as_mut());
        }
    }

    /// Test build: record operations locally instead of broadcasting, so
    /// tests can assert on what would have been sent.
    #[cfg(test)]
    pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
        self.operations.push(operation);
    }

    /// Discards per-replica state (e.g. selections) for a departed peer.
    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
        self.text.remove_peer(replica_id);
        cx.notify();
    }
1507
    /// Undoes the most recent transaction, broadcasting the resulting
    /// operations and running post-edit bookkeeping.
    pub fn undo(&mut self, cx: &mut ModelContext<Self>) {
        // Snapshot dirtiness and version *before* mutating, so did_edit can
        // detect what changed and whether a Dirtied event is needed.
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        for operation in self.text.undo() {
            self.send_operation(Operation::Buffer(operation), cx);
        }

        self.did_edit(&old_version, was_dirty, cx);
    }

    /// Redoes the most recently undone transaction, broadcasting the
    /// resulting operations and running post-edit bookkeeping.
    pub fn redo(&mut self, cx: &mut ModelContext<Self>) {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        for operation in self.text.redo() {
            self.send_operation(Operation::Buffer(operation), cx);
        }

        self.did_edit(&old_version, was_dirty, cx);
    }
1529}
1530
/// Test-only helpers for driving a buffer with random edits.
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Performs a batch of random edits (within a single transaction) over up
    /// to `old_range_count` ranges of the existing text.
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        old_range_count: usize,
        cx: &mut ModelContext<Self>,
    ) where
        T: rand::Rng,
    {
        self.start_transaction(None).unwrap();
        self.text.randomly_edit(rng, old_range_count);
        self.end_transaction(None, cx).unwrap();
    }

    /// Performs a random mutation (edits and/or selection changes) within a
    /// single transaction.
    pub fn randomly_mutate<T>(&mut self, rng: &mut T, cx: &mut ModelContext<Self>)
    where
        T: rand::Rng,
    {
        self.start_transaction(None).unwrap();
        self.text.randomly_mutate(rng);
        self.end_transaction(None, cx).unwrap();
    }
}
1555
impl Entity for Buffer {
    type Event = Event;

    /// Informs the backing file's worktree that this buffer is going away, so
    /// it can release any per-buffer state.
    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
        if let Some(file) = self.file.as_ref() {
            file.buffer_removed(self.remote_id(), cx);
        }
    }
}

// Expose the underlying text buffer's read-only API directly on `Buffer`.
impl Deref for Buffer {
    type Target = TextBuffer;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
1573
impl BufferSnapshot {
    /// Produces one [`IndentSuggestion`] per row in `row_range`, based on the
    /// grammar's `indents` query. Returns `None` when the buffer has no
    /// grammar or no parse tree yet.
    fn suggest_autoindents<'a>(
        &'a self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
        let mut query_cursor = QueryCursorHandle::new();
        if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
            let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

            // Get the "indentation ranges" that intersect this row range.
            let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
            let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
            query_cursor.set_point_range(
                Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
                    ..Point::new(row_range.end, 0).to_ts_point(),
            );
            let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
            for mat in query_cursor.matches(
                &grammar.indents_query,
                tree.root_node(),
                TextProvider(self.as_rope()),
            ) {
                // Each match contributes at most one range: it starts at the
                // first `@indent` capture and ends either at an `@end` capture
                // or at the `@indent` node's own end.
                let mut node_kind = "";
                let mut start: Option<Point> = None;
                let mut end: Option<Point> = None;
                for capture in mat.captures {
                    if Some(capture.index) == indent_capture_ix {
                        node_kind = capture.node.kind();
                        start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                        end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                    } else if Some(capture.index) == end_capture_ix {
                        // NOTE(review): the `.into()` here is a no-op
                        // conversion and could be dropped.
                        end = Some(Point::from_ts_point(capture.node.start_position().into()));
                    }
                }

                if let Some((start, end)) = start.zip(end) {
                    // Single-line ranges can't affect indentation.
                    if start.row == end.row {
                        continue;
                    }

                    // Keep `indentation_ranges` sorted by start point, merging
                    // ranges that begin at the same point.
                    let range = start..end;
                    match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
                        Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
                        Ok(ix) => {
                            let prev_range = &mut indentation_ranges[ix];
                            prev_range.0.end = prev_range.0.end.max(range.end);
                        }
                    }
                }
            }

            let mut prev_row = prev_non_blank_row.unwrap_or(0);
            Some(row_range.map(move |row| {
                let row_start = Point::new(row, self.indent_column_for_line(row));

                // Decide, relative to the previous (non-blank) row, whether
                // this row enters an indentation range (indent) or has left
                // one (outdent).
                let mut indent_from_prev_row = false;
                let mut outdent_to_row = u32::MAX;
                for (range, _node_kind) in &indentation_ranges {
                    if range.start.row >= row {
                        break;
                    }

                    if range.start.row == prev_row && range.end > row_start {
                        indent_from_prev_row = true;
                    }
                    if range.end.row >= prev_row && range.end <= row_start {
                        outdent_to_row = outdent_to_row.min(range.start.row);
                    }
                }

                // An indent and an outdent relative to the same basis row
                // cancel out; otherwise prefer the more specific basis.
                let suggestion = if outdent_to_row == prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                } else if indent_from_prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: true,
                    }
                } else if outdent_to_row < prev_row {
                    IndentSuggestion {
                        basis_row: outdent_to_row,
                        indent: false,
                    }
                } else {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                };

                prev_row = row;
                suggestion
            }))
        } else {
            None
        }
    }

    /// The nearest non-blank row strictly above `row`, if any.
    fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
        while row > 0 {
            row -= 1;
            if !self.is_line_blank(row) {
                return Some(row);
            }
        }
        None
    }

    /// Iterates over the text in `range` as chunks annotated with syntax
    /// highlighting and diagnostic severity. Both annotations are only
    /// computed when a `theme` is provided; without one this is a plain text
    /// chunk iterator.
    pub fn chunks<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
        theme: Option<&'a SyntaxTheme>,
    ) -> BufferChunks<'a> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);

        let mut highlights = None;
        let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
        if let Some(theme) = theme {
            // Flatten the overlapping diagnostics into a sorted stream of
            // start/end endpoints; the iterator tracks nesting depth per
            // severity as it passes them.
            for entry in self
                .diagnostics
                .range::<_, usize>(range.clone(), self, true)
            {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                });
            }
            // Sort so that, at equal offsets, start endpoints come first.
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));

            if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
                let mut query_cursor = QueryCursorHandle::new();

                // TODO - add a Tree-sitter API to remove the need for this.
                // NOTE(review): this extends the cursor borrow to 'static so
                // the captures iterator and the cursor can live in the same
                // struct; soundness relies on BufferChunks dropping
                // `captures` before `_query_cursor` — confirm field order.
                let cursor = unsafe {
                    std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
                };
                let captures = cursor.set_byte_range(range.clone()).captures(
                    &grammar.highlights_query,
                    tree.root_node(),
                    TextProvider(self.text.as_rope()),
                );
                highlights = Some(BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_map: grammar.highlight_map(),
                    _query_cursor: query_cursor,
                    theme,
                })
            }
        }

        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
        let chunks = self.text.as_rope().chunks_in_range(range.clone());

        BufferChunks {
            range,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            highlights,
        }
    }

    /// The tree-sitter grammar of the snapshot's language, if any.
    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language
            .as_ref()
            .and_then(|language| language.grammar.as_ref())
    }

    /// Counter incremented on every diagnostics update; lets observers detect
    /// staleness cheaply.
    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    /// Counter incremented on every reparse; lets observers detect staleness
    /// cheaply.
    pub fn parse_count(&self) -> usize {
        self.parse_count
    }
}
1764
// Manual Clone impl; presumably a derive isn't used because not every field's
// type derives Clone in the obvious way — TODO confirm.
impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            tree: self.tree.clone(),
            diagnostics: self.diagnostics.clone(),
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.is_parsing,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }
}

// Expose the underlying text snapshot's read-only API directly.
impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
1786
// Lets tree-sitter read node text directly out of the rope, chunk by chunk,
// without materializing a contiguous string.
impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        ByteChunks(self.0.chunks_in_range(node.byte_range()))
    }
}

// Adapts the rope's `&str` chunk iterator to the `&[u8]` chunks tree-sitter
// expects.
struct ByteChunks<'a>(rope::Chunks<'a>);
1796
1797impl<'a> Iterator for ByteChunks<'a> {
1798 type Item = &'a [u8];
1799
1800 fn next(&mut self) -> Option<Self::Item> {
1801 self.0.next().map(str::as_bytes)
1802 }
1803}
1804
// SAFETY(review): asserts BufferChunks may be sent across threads despite the
// raw tree-sitter cursor state it holds; this relies on QueryCursor/
// QueryCaptures not being thread-affine — TODO confirm against the
// tree-sitter bindings.
unsafe impl<'a> Send for BufferChunks<'a> {}
1806
1807impl<'a> BufferChunks<'a> {
1808 pub fn seek(&mut self, offset: usize) {
1809 self.range.start = offset;
1810 self.chunks.seek(self.range.start);
1811 if let Some(highlights) = self.highlights.as_mut() {
1812 highlights
1813 .stack
1814 .retain(|(end_offset, _)| *end_offset > offset);
1815 if let Some((mat, capture_ix)) = &highlights.next_capture {
1816 let capture = mat.captures[*capture_ix as usize];
1817 if offset >= capture.node.start_byte() {
1818 let next_capture_end = capture.node.end_byte();
1819 if offset < next_capture_end {
1820 highlights.stack.push((
1821 next_capture_end,
1822 highlights.highlight_map.get(capture.index),
1823 ));
1824 }
1825 highlights.next_capture.take();
1826 }
1827 }
1828 highlights.captures.set_byte_range(self.range.clone());
1829 }
1830 }
1831
1832 pub fn offset(&self) -> usize {
1833 self.range.start
1834 }
1835
1836 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
1837 let depth = match endpoint.severity {
1838 DiagnosticSeverity::ERROR => &mut self.error_depth,
1839 DiagnosticSeverity::WARNING => &mut self.warning_depth,
1840 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
1841 DiagnosticSeverity::HINT => &mut self.hint_depth,
1842 _ => return,
1843 };
1844 if endpoint.is_start {
1845 *depth += 1;
1846 } else {
1847 *depth -= 1;
1848 }
1849 }
1850
1851 fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
1852 if self.error_depth > 0 {
1853 Some(DiagnosticSeverity::ERROR)
1854 } else if self.warning_depth > 0 {
1855 Some(DiagnosticSeverity::WARNING)
1856 } else if self.information_depth > 0 {
1857 Some(DiagnosticSeverity::INFORMATION)
1858 } else if self.hint_depth > 0 {
1859 Some(DiagnosticSeverity::HINT)
1860 } else {
1861 None
1862 }
1863 }
1864}
1865
impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    /// Yields the next text chunk, split at every highlight-capture boundary
    /// and diagnostic endpoint so each returned `Chunk` has a single
    /// highlight style and diagnostic severity.
    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Pop highlight scopes that ended at or before the current offset.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Push every capture that has already begun onto the scope stack;
            // remember where the next not-yet-started capture begins so the
            // chunk can be cut there.
            while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
                let capture = mat.captures[*capture_ix as usize];
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id = highlights.highlight_map.get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Consume diagnostic endpoints up to the current offset, updating the
        // severity depth counters; remember where the next one lies.
        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
            if endpoint.offset <= self.range.start {
                self.update_diagnostic_depths(endpoint);
                self.diagnostic_endpoints.next();
            } else {
                next_diagnostic_endpoint = endpoint.offset;
                break;
            }
        }

        if let Some(chunk) = self.chunks.peek() {
            let chunk_start = self.range.start;
            // Cut the chunk at the earliest of: end of the rope chunk, next
            // capture start, next diagnostic endpoint, or the end of the
            // innermost highlight scope.
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_style = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
                    highlight_style = parent_highlight_id.style(highlights.theme);
                }
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            self.range.start = chunk_end;
            // Advance the rope iterator only once its current chunk is fully
            // consumed.
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                highlight_style,
                diagnostic: self.current_diagnostic_severity(),
            })
        } else {
            None
        }
    }
}
1941
1942impl QueryCursorHandle {
1943 fn new() -> Self {
1944 QueryCursorHandle(Some(
1945 QUERY_CURSORS
1946 .lock()
1947 .pop()
1948 .unwrap_or_else(|| QueryCursor::new()),
1949 ))
1950 }
1951}
1952
// The handle behaves like the pooled cursor itself. The inner Option is only
// `None` transiently inside `Drop`, so the unwraps here cannot fail.
impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        self.0.as_ref().unwrap()
    }
}

impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.0.as_mut().unwrap()
    }
}

impl Drop for QueryCursorHandle {
    /// Returns the cursor to the global pool, first resetting its byte and
    /// point restrictions so a stale range can't leak into the next user.
    fn drop(&mut self) {
        let mut cursor = self.0.take().unwrap();
        cursor.set_byte_range(0..usize::MAX);
        cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
        QUERY_CURSORS.lock().push(cursor)
    }
}
1975
/// Conversion between the editor's `Point` (u32 row/column) and tree-sitter's
/// `Point` (usize row/column).
trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}

impl ToTreeSitterPoint for Point {
    fn to_ts_point(self) -> tree_sitter::Point {
        tree_sitter::Point::new(self.row as usize, self.column as usize)
    }

    // Narrowing usize -> u32 casts; coordinates from tree-sitter are assumed
    // to fit in u32, matching the editor's Point representation.
    fn from_ts_point(point: tree_sitter::Point) -> Self {
        Point::new(point.row as u32, point.column as u32)
    }
}

/// Conversion from an LSP position (0-based line, UTF-16 character offset)
/// into the editor's `PointUtf16`.
trait ToPointUtf16 {
    fn to_point_utf16(self) -> PointUtf16;
}

impl ToPointUtf16 for lsp::Position {
    fn to_point_utf16(self) -> PointUtf16 {
        PointUtf16::new(self.line, self.character)
    }
}
2000
// Allows non-text operations to be stored in the deferred-operation queue,
// which orders by Lamport timestamp. Text operations are handled by the text
// layer and must never be deferred here.
impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}
2013
2014fn diagnostic_ranges<'a>(
2015 diagnostic: &'a lsp::Diagnostic,
2016 abs_path: Option<&'a Path>,
2017) -> impl 'a + Iterator<Item = Range<PointUtf16>> {
2018 diagnostic
2019 .related_information
2020 .iter()
2021 .flatten()
2022 .filter_map(move |info| {
2023 if info.location.uri.to_file_path().ok()? == abs_path? {
2024 let info_start = PointUtf16::new(
2025 info.location.range.start.line,
2026 info.location.range.start.character,
2027 );
2028 let info_end = PointUtf16::new(
2029 info.location.range.end.line,
2030 info.location.range.end.character,
2031 );
2032 Some(info_start..info_end)
2033 } else {
2034 None
2035 }
2036 })
2037 .chain(Some(
2038 diagnostic.range.start.to_point_utf16()..diagnostic.range.end.to_point_utf16(),
2039 ))
2040}
2041
/// Groups an ascending sequence of `u32` values into contiguous ranges, with
/// each range capped at `max_len` values.
///
/// E.g. `[1, 2, 3, 5]` yields `1..4` then `5..6`. A value that is not exactly
/// one past the current range's end always starts a new range.
pub fn contiguous_ranges(
    // Bind the parameter as `mut` directly — the previous
    // `let mut values = values.into_iter();` was a redundant identity
    // conversion on a value that is already an iterator.
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                // Extend the open range while values stay adjacent and the
                // length cap hasn't been reached.
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            // Close out the finished range (if any) and open a new one.
            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            // Input exhausted: emit the final range, then end the iterator.
            return current_range.take();
        }
    })
}