1use crate::diagnostic_set::DiagnosticEntry;
2pub use crate::{
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
6 PLAIN_TEXT,
7};
8use anyhow::{anyhow, Result};
9use clock::ReplicaId;
10use futures::FutureExt as _;
11use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
12use lazy_static::lazy_static;
13use lsp::LanguageServer;
14use parking_lot::Mutex;
15use postage::{prelude::Stream, sink::Sink, watch};
16use similar::{ChangeTag, TextDiff};
17use smol::future::yield_now;
18use std::{
19 any::Any,
20 cell::RefCell,
21 cmp,
22 collections::{BTreeMap, HashMap, HashSet},
23 ffi::OsString,
24 future::Future,
25 iter::{Iterator, Peekable},
26 ops::{Deref, DerefMut, Range},
27 path::{Path, PathBuf},
28 str,
29 sync::Arc,
30 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
31 vec,
32};
33use text::operation_queue::OperationQueue;
34pub use text::{Buffer as TextBuffer, Operation as _, *};
35use theme::SyntaxTheme;
36use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
37use util::{post_inc, TryFutureExt as _};
38
39#[cfg(any(test, feature = "test-support"))]
40pub use tree_sitter_rust;
41
42pub use lsp::DiagnosticSeverity;
43
44thread_local! {
45 static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
46}
47
48lazy_static! {
49 static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
50}
51
52// TODO - Make this configurable
53const INDENT_SIZE: u32 = 4;
54
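/// A buffer of text that can be associated with a file on disk, a language,
/// and a language server. It layers a syntax tree, diagnostics, and
/// autoindentation on top of the underlying replicated `TextBuffer`.
///
/// A minimal usage sketch (illustrative only; it assumes a gpui
/// `MutableAppContext` named `cx` is in scope):
///
/// ```ignore
/// let buffer = cx.add_model(|cx| Buffer::new(0, "fn main() {}", cx));
/// buffer.update(cx, |buffer, cx| buffer.edit([12..12], "\n", cx));
/// ```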
55pub struct Buffer {
56 text: TextBuffer,
57 file: Option<Box<dyn File>>,
58 saved_version: clock::Global,
59 saved_mtime: SystemTime,
60 language: Option<Arc<Language>>,
61 autoindent_requests: Vec<Arc<AutoindentRequest>>,
62 pending_autoindent: Option<Task<()>>,
63 sync_parse_timeout: Duration,
64 syntax_tree: Mutex<Option<SyntaxTree>>,
65 parsing_in_background: bool,
66 parse_count: usize,
67 diagnostics: DiagnosticSet,
68 diagnostics_update_count: usize,
69 language_server: Option<LanguageServerState>,
70 deferred_ops: OperationQueue<Operation>,
71 #[cfg(test)]
72 pub(crate) operations: Vec<Operation>,
73}
74
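/// A point-in-time view of a `Buffer`: its text, syntax tree, diagnostics,
/// and language. Snapshots can be cloned and moved onto background tasks (as
/// `compute_autoindents` does) without keeping a reference to the buffer.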
75pub struct BufferSnapshot {
76 text: text::BufferSnapshot,
77 tree: Option<Tree>,
78 diagnostics: DiagnosticSet,
79 diagnostics_update_count: usize,
80 is_parsing: bool,
81 language: Option<Arc<Language>>,
82 parse_count: usize,
83}
84
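/// A diagnostic produced by a language server, resolved against this buffer.
/// Diagnostics that refer to one another share a `group_id`, and the most
/// severe entry of each group has `is_primary` set (see `update_diagnostics`).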
85#[derive(Clone, Debug, PartialEq, Eq)]
86pub struct Diagnostic {
87 pub severity: DiagnosticSeverity,
88 pub message: String,
89 pub group_id: usize,
90 pub is_primary: bool,
91}
92
93struct LanguageServerState {
94 server: Arc<LanguageServer>,
95 latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
96 pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
97 next_version: usize,
98 _maintain_server: Task<Option<()>>,
99}
100
101#[derive(Clone)]
102struct LanguageServerSnapshot {
103 buffer_snapshot: text::BufferSnapshot,
104 version: usize,
105 path: Arc<Path>,
106}
107
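/// An operation used to keep buffer replicas in sync: either a plain text
/// operation, or a wholesale diagnostics update stamped with a Lamport
/// timestamp so that replicas can order it relative to other operations.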
108#[derive(Clone, Debug)]
109pub enum Operation {
110 Buffer(text::Operation),
111 UpdateDiagnostics {
112 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
113 lamport_timestamp: clock::Lamport,
114 },
115}
116
117#[derive(Clone, Debug, Eq, PartialEq)]
118pub enum Event {
119 Edited,
120 Dirtied,
121 Saved,
122 FileHandleChanged,
123 Reloaded,
124 Reparsed,
125 DiagnosticsUpdated,
126 Closed,
127}
128
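/// The worktree file that backs a buffer. The buffer calls through this trait
/// to load and save its contents and to forward the operations it produces
/// (`buffer_updated`), while the file's owner reports external changes back
/// to the buffer via `Buffer::file_updated` and `Buffer::did_save`.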
129pub trait File {
130 fn worktree_id(&self) -> usize;
131
132 fn entry_id(&self) -> Option<usize>;
133
134 fn mtime(&self) -> SystemTime;
135
136 /// Returns the path of this file relative to the worktree's root directory.
137 fn path(&self) -> &Arc<Path>;
138
139 /// Returns the absolute path of this file.
140 fn abs_path(&self) -> Option<PathBuf>;
141
142 /// Returns the path of this file relative to the worktree's parent directory (this means it
143 /// includes the name of the worktree's root folder).
144 fn full_path(&self) -> PathBuf;
145
146 /// Returns the last component of this handle's absolute path. If this handle refers to the root
147 /// of its worktree, then this method will return the name of the worktree itself.
148 fn file_name(&self) -> Option<OsString>;
149
150 fn is_deleted(&self) -> bool;
151
152 fn save(
153 &self,
154 buffer_id: u64,
155 text: Rope,
156 version: clock::Global,
157 cx: &mut MutableAppContext,
158 ) -> Task<Result<(clock::Global, SystemTime)>>;
159
160 fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;
161
162 fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);
163
164 fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);
165
166 fn boxed_clone(&self) -> Box<dyn File>;
167
168 fn as_any(&self) -> &dyn Any;
169}
170
171struct QueryCursorHandle(Option<QueryCursor>);
172
173#[derive(Clone)]
174struct SyntaxTree {
175 tree: Tree,
176 version: clock::Global,
177}
178
179#[derive(Clone)]
180struct AutoindentRequest {
181 selection_set_ids: HashSet<SelectionSetId>,
182 before_edit: BufferSnapshot,
183 edited: Vec<Anchor>,
184 inserted: Option<Vec<Range<Anchor>>>,
185}
186
187#[derive(Debug)]
188struct IndentSuggestion {
189 basis_row: u32,
190 indent: bool,
191}
192
193struct TextProvider<'a>(&'a Rope);
194
195struct BufferChunkHighlights<'a> {
196 captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
197 next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
198 stack: Vec<(usize, HighlightId)>,
199 highlight_map: HighlightMap,
200 theme: &'a SyntaxTheme,
201 _query_cursor: QueryCursorHandle,
202}
203
204pub struct BufferChunks<'a> {
205 range: Range<usize>,
206 chunks: rope::Chunks<'a>,
207 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
208 error_depth: usize,
209 warning_depth: usize,
210 information_depth: usize,
211 hint_depth: usize,
212 highlights: Option<BufferChunkHighlights<'a>>,
213}
214
215#[derive(Clone, Copy, Debug, Default)]
216pub struct Chunk<'a> {
217 pub text: &'a str,
218 pub highlight_style: Option<HighlightStyle>,
219 pub diagnostic: Option<DiagnosticSeverity>,
220}
221
222pub(crate) struct Diff {
223 base_version: clock::Global,
224 new_text: Arc<str>,
225 changes: Vec<(ChangeTag, usize)>,
226}
227
228#[derive(Clone, Copy)]
229struct DiagnosticEndpoint {
230 offset: usize,
231 is_start: bool,
232 severity: DiagnosticSeverity,
233}
234
235impl Buffer {
236 pub fn new<T: Into<Arc<str>>>(
237 replica_id: ReplicaId,
238 base_text: T,
239 cx: &mut ModelContext<Self>,
240 ) -> Self {
241 Self::build(
242 TextBuffer::new(
243 replica_id,
244 cx.model_id() as u64,
245 History::new(base_text.into()),
246 ),
247 None,
248 )
249 }
250
251 pub fn from_file<T: Into<Arc<str>>>(
252 replica_id: ReplicaId,
253 base_text: T,
254 file: Box<dyn File>,
255 cx: &mut ModelContext<Self>,
256 ) -> Self {
257 Self::build(
258 TextBuffer::new(
259 replica_id,
260 cx.model_id() as u64,
261 History::new(base_text.into()),
262 ),
263 Some(file),
264 )
265 }
266
267 pub fn from_proto(
268 replica_id: ReplicaId,
269 message: proto::Buffer,
270 file: Option<Box<dyn File>>,
271 cx: &mut ModelContext<Self>,
272 ) -> Result<Self> {
273 let mut buffer =
274 text::Buffer::new(replica_id, message.id, History::new(message.content.into()));
275 let ops = message
276 .history
277 .into_iter()
278 .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op)));
279 buffer.apply_ops(ops)?;
280 for set in message.selections {
281 let set = proto::deserialize_selection_set(set);
282 buffer.add_raw_selection_set(set.id, set);
283 }
284 let mut this = Self::build(buffer, file);
285 this.apply_diagnostic_update(
286 Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
287 cx,
288 );
289
290 Ok(this)
291 }
292
293 pub fn to_proto(&self) -> proto::Buffer {
294 proto::Buffer {
295 id: self.remote_id(),
296 content: self.text.base_text().to_string(),
297 history: self
298 .text
299 .history()
300 .map(proto::serialize_edit_operation)
301 .collect(),
302 selections: self
303 .selection_sets()
304 .map(|(_, set)| proto::serialize_selection_set(set))
305 .collect(),
306 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
307 }
308 }
309
310 pub fn with_language(
311 mut self,
312 language: Option<Arc<Language>>,
313 language_server: Option<Arc<LanguageServer>>,
314 cx: &mut ModelContext<Self>,
315 ) -> Self {
316 self.set_language(language, language_server, cx);
317 self
318 }
319
320 fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
        let saved_mtime = file.as_ref().map_or(UNIX_EPOCH, |file| file.mtime());
327
328 Self {
329 saved_mtime,
330 saved_version: buffer.version(),
331 text: buffer,
332 file,
333 syntax_tree: Mutex::new(None),
334 parsing_in_background: false,
335 parse_count: 0,
336 sync_parse_timeout: Duration::from_millis(1),
337 autoindent_requests: Default::default(),
338 pending_autoindent: Default::default(),
339 language: None,
340 diagnostics: Default::default(),
341 diagnostics_update_count: 0,
342 language_server: None,
343 deferred_ops: OperationQueue::new(),
344 #[cfg(test)]
345 operations: Default::default(),
346 }
347 }
348
349 pub fn snapshot(&self) -> BufferSnapshot {
350 BufferSnapshot {
351 text: self.text.snapshot(),
352 tree: self.syntax_tree(),
353 diagnostics: self.diagnostics.clone(),
354 diagnostics_update_count: self.diagnostics_update_count,
355 is_parsing: self.parsing_in_background,
356 language: self.language.clone(),
357 parse_count: self.parse_count,
358 }
359 }
360
361 pub fn file(&self) -> Option<&dyn File> {
362 self.file.as_deref()
363 }
364
365 pub fn save(
366 &mut self,
367 cx: &mut ModelContext<Self>,
368 ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
369 let file = self
370 .file
371 .as_ref()
372 .ok_or_else(|| anyhow!("buffer has no file"))?;
373 let text = self.as_rope().clone();
374 let version = self.version();
375 let save = file.save(self.remote_id(), text, version, cx.as_mut());
376 Ok(cx.spawn(|this, mut cx| async move {
377 let (version, mtime) = save.await?;
378 this.update(&mut cx, |this, cx| {
379 this.did_save(version.clone(), mtime, None, cx);
380 });
381 Ok((version, mtime))
382 }))
383 }
384
385 pub fn set_language(
386 &mut self,
387 language: Option<Arc<Language>>,
388 language_server: Option<Arc<lsp::LanguageServer>>,
389 cx: &mut ModelContext<Self>,
390 ) {
391 self.language = language;
392 self.language_server = if let Some(server) = language_server {
393 let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
394 Some(LanguageServerState {
395 latest_snapshot: latest_snapshot_tx,
396 pending_snapshots: Default::default(),
397 next_version: 0,
398 server: server.clone(),
399 _maintain_server: cx.background().spawn(
400 async move {
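                        // Forward each new buffer snapshot to the language server: the first
                        // snapshot becomes a `didOpen` notification, and every subsequent one
                        // is diffed against its predecessor and sent as an incremental
                        // `didChange`.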
401 let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
402 while let Some(snapshot) = latest_snapshot_rx.recv().await {
403 if let Some(snapshot) = snapshot {
404 let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
405 if let Some(prev_snapshot) = prev_snapshot {
406 let changes = lsp::DidChangeTextDocumentParams {
407 text_document: lsp::VersionedTextDocumentIdentifier::new(
408 uri,
409 snapshot.version as i32,
410 ),
411 content_changes: snapshot
412 .buffer_snapshot
413 .edits_since::<(PointUtf16, usize)>(
414 prev_snapshot.buffer_snapshot.version(),
415 )
416 .map(|edit| {
417 let edit_start = edit.new.start.0;
418 let edit_end = edit_start
419 + (edit.old.end.0 - edit.old.start.0);
420 let new_text = snapshot
421 .buffer_snapshot
422 .text_for_range(
423 edit.new.start.1..edit.new.end.1,
424 )
425 .collect();
426 lsp::TextDocumentContentChangeEvent {
427 range: Some(lsp::Range::new(
428 lsp::Position::new(
429 edit_start.row,
430 edit_start.column,
431 ),
432 lsp::Position::new(
433 edit_end.row,
434 edit_end.column,
435 ),
436 )),
437 range_length: None,
438 text: new_text,
439 }
440 })
441 .collect(),
442 };
443 server
444 .notify::<lsp::notification::DidChangeTextDocument>(changes)
445 .await?;
446 } else {
447 server
448 .notify::<lsp::notification::DidOpenTextDocument>(
449 lsp::DidOpenTextDocumentParams {
450 text_document: lsp::TextDocumentItem::new(
451 uri,
452 Default::default(),
453 snapshot.version as i32,
454 snapshot.buffer_snapshot.text().to_string(),
455 ),
456 },
457 )
458 .await?;
459 }
460
461 prev_snapshot = Some(snapshot);
462 }
463 }
464 Ok(())
465 }
466 .log_err(),
467 ),
468 })
469 } else {
470 None
471 };
472
473 self.reparse(cx);
474 self.update_language_server();
475 }
476
477 pub fn did_save(
478 &mut self,
479 version: clock::Global,
480 mtime: SystemTime,
481 new_file: Option<Box<dyn File>>,
482 cx: &mut ModelContext<Self>,
483 ) {
484 self.saved_mtime = mtime;
485 self.saved_version = version;
486 if let Some(new_file) = new_file {
487 self.file = Some(new_file);
488 }
489 if let Some(state) = &self.language_server {
490 cx.background()
491 .spawn(
492 state
493 .server
494 .notify::<lsp::notification::DidSaveTextDocument>(
495 lsp::DidSaveTextDocumentParams {
496 text_document: lsp::TextDocumentIdentifier {
497 uri: lsp::Url::from_file_path(
498 self.file.as_ref().unwrap().abs_path().unwrap(),
499 )
500 .unwrap(),
501 },
502 text: None,
503 },
504 ),
505 )
506 .detach()
507 }
508 cx.emit(Event::Saved);
509 }
510
511 pub fn file_updated(
512 &mut self,
513 new_file: Box<dyn File>,
514 cx: &mut ModelContext<Self>,
515 ) -> Option<Task<()>> {
516 let old_file = self.file.as_ref()?;
517 let mut file_changed = false;
518 let mut task = None;
519
520 if new_file.path() != old_file.path() {
521 file_changed = true;
522 }
523
524 if new_file.is_deleted() {
525 if !old_file.is_deleted() {
526 file_changed = true;
527 if !self.is_dirty() {
528 cx.emit(Event::Dirtied);
529 }
530 }
531 } else {
532 let new_mtime = new_file.mtime();
533 if new_mtime != old_file.mtime() {
534 file_changed = true;
535
536 if !self.is_dirty() {
537 task = Some(cx.spawn(|this, mut cx| {
538 async move {
539 let new_text = this.read_with(&cx, |this, cx| {
540 this.file.as_ref().and_then(|file| file.load_local(cx))
541 });
542 if let Some(new_text) = new_text {
543 let new_text = new_text.await?;
544 let diff = this
545 .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
546 .await;
547 this.update(&mut cx, |this, cx| {
548 if this.apply_diff(diff, cx) {
549 this.saved_version = this.version();
550 this.saved_mtime = new_mtime;
551 cx.emit(Event::Reloaded);
552 }
553 });
554 }
555 Ok(())
556 }
557 .log_err()
558 .map(drop)
559 }));
560 }
561 }
562 }
563
564 if file_changed {
565 cx.emit(Event::FileHandleChanged);
566 }
567 self.file = Some(new_file);
568 task
569 }
570
571 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
572 cx.emit(Event::Closed);
573 }
574
575 pub fn language(&self) -> Option<&Arc<Language>> {
576 self.language.as_ref()
577 }
578
579 pub fn parse_count(&self) -> usize {
580 self.parse_count
581 }
582
583 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
584 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
585 self.interpolate_tree(syntax_tree);
586 Some(syntax_tree.tree.clone())
587 } else {
588 None
589 }
590 }
591
592 #[cfg(any(test, feature = "test-support"))]
593 pub fn is_parsing(&self) -> bool {
594 self.parsing_in_background
595 }
596
597 #[cfg(test)]
598 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
599 self.sync_parse_timeout = timeout;
600 }
601
602 fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
603 if self.parsing_in_background {
604 return false;
605 }
606
607 if let Some(grammar) = self.grammar().cloned() {
608 let old_tree = self.syntax_tree();
609 let text = self.as_rope().clone();
610 let parsed_version = self.version();
611 let parse_task = cx.background().spawn({
612 let grammar = grammar.clone();
613 async move { Self::parse_text(&text, old_tree, &grammar) }
614 });
615
616 match cx
617 .background()
618 .block_with_timeout(self.sync_parse_timeout, parse_task)
619 {
620 Ok(new_tree) => {
621 self.did_finish_parsing(new_tree, parsed_version, cx);
622 return true;
623 }
624 Err(parse_task) => {
625 self.parsing_in_background = true;
626 cx.spawn(move |this, mut cx| async move {
627 let new_tree = parse_task.await;
628 this.update(&mut cx, move |this, cx| {
629 let grammar_changed = this
630 .grammar()
631 .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
632 let parse_again = this.version.gt(&parsed_version) || grammar_changed;
633 this.parsing_in_background = false;
634 this.did_finish_parsing(new_tree, parsed_version, cx);
635
636 if parse_again && this.reparse(cx) {
637 return;
638 }
639 });
640 })
641 .detach();
642 }
643 }
644 }
645 false
646 }
647
648 fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
649 PARSER.with(|parser| {
650 let mut parser = parser.borrow_mut();
651 parser
652 .set_language(grammar.ts_language)
653 .expect("incompatible grammar");
654 let mut chunks = text.chunks_in_range(0..text.len());
            parser
                .parse_with(
                    &mut move |offset, _| {
                        chunks.seek(offset);
                        chunks.next().unwrap_or("").as_bytes()
                    },
                    old_tree.as_ref(),
                )
                .unwrap()
665 })
666 }
667
668 fn interpolate_tree(&self, tree: &mut SyntaxTree) {
669 for edit in self.edits_since::<(usize, Point)>(&tree.version) {
670 let (bytes, lines) = edit.flatten();
671 tree.tree.edit(&InputEdit {
672 start_byte: bytes.new.start,
673 old_end_byte: bytes.new.start + bytes.old.len(),
674 new_end_byte: bytes.new.end,
675 start_position: lines.new.start.to_ts_point(),
676 old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
677 .to_ts_point(),
678 new_end_position: lines.new.end.to_ts_point(),
679 });
680 }
681 tree.version = self.version();
682 }
683
684 fn did_finish_parsing(
685 &mut self,
686 tree: Tree,
687 version: clock::Global,
688 cx: &mut ModelContext<Self>,
689 ) {
690 self.parse_count += 1;
691 *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
692 self.request_autoindent(cx);
693 cx.emit(Event::Reparsed);
694 cx.notify();
695 }
696
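    /// Replaces this buffer's diagnostics with a set received from the
    /// language server. Related diagnostics are grouped and a primary entry is
    /// chosen per group, and positions reported by disk-based sources are
    /// translated through the edits made since the buffer was last saved.
    /// `version` is the LSP document version the diagnostics were computed
    /// against, used to look up the matching pending snapshot. Returns an
    /// operation that broadcasts the new diagnostics to other replicas.
    ///
    /// An illustrative call only (`params` is assumed to be an
    /// `lsp::PublishDiagnosticsParams` received elsewhere):
    ///
    /// ```ignore
    /// let op = buffer.update_diagnostics(params.version, params.diagnostics, cx)?;
    /// buffer.send_operation(op, cx);
    /// ```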
697 pub fn update_diagnostics(
698 &mut self,
699 version: Option<i32>,
700 mut diagnostics: Vec<lsp::Diagnostic>,
701 cx: &mut ModelContext<Self>,
702 ) -> Result<Operation> {
703 diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));
704
705 let version = version.map(|version| version as usize);
706 let content = if let Some(version) = version {
707 let language_server = self.language_server.as_mut().unwrap();
708 let snapshot = language_server
709 .pending_snapshots
710 .get(&version)
711 .ok_or_else(|| anyhow!("missing snapshot"))?;
712 &snapshot.buffer_snapshot
713 } else {
714 self.deref()
715 };
716 let abs_path = self.file.as_ref().and_then(|f| f.abs_path());
717
718 let empty_set = HashSet::new();
719 let disk_based_sources = self
720 .language
721 .as_ref()
722 .and_then(|language| language.disk_based_diagnostic_sources())
723 .unwrap_or(&empty_set);
724
725 let mut edits_since_save = content
726 .edits_since::<PointUtf16>(&self.saved_version)
727 .peekable();
728 let mut last_edit_old_end = PointUtf16::zero();
729 let mut last_edit_new_end = PointUtf16::zero();
730 let mut group_ids_by_diagnostic_range = HashMap::new();
731 let mut diagnostics_by_group_id = HashMap::new();
732 let mut next_group_id = 0;
733 'outer: for diagnostic in &diagnostics {
734 let mut start = diagnostic.range.start.to_point_utf16();
735 let mut end = diagnostic.range.end.to_point_utf16();
736 let source = diagnostic.source.as_ref();
737 let code = diagnostic.code.as_ref();
738 let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
739 .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
740 .copied()
741 .unwrap_or_else(|| {
742 let group_id = post_inc(&mut next_group_id);
743 for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
744 group_ids_by_diagnostic_range.insert((source, code, range), group_id);
745 }
746 group_id
747 });
748
749 if diagnostic
750 .source
751 .as_ref()
752 .map_or(false, |source| disk_based_sources.contains(source))
753 {
754 while let Some(edit) = edits_since_save.peek() {
755 if edit.old.end <= start {
756 last_edit_old_end = edit.old.end;
757 last_edit_new_end = edit.new.end;
758 edits_since_save.next();
759 } else if edit.old.start <= end && edit.old.end >= start {
760 continue 'outer;
761 } else {
762 break;
763 }
764 }
765
766 start = last_edit_new_end + (start - last_edit_old_end);
767 end = last_edit_new_end + (end - last_edit_old_end);
768 }
769
770 let mut range = content.clip_point_utf16(start, Bias::Left)
771 ..content.clip_point_utf16(end, Bias::Right);
772 if range.start == range.end {
773 range.end.column += 1;
774 range.end = content.clip_point_utf16(range.end, Bias::Right);
775 if range.start == range.end && range.end.column > 0 {
776 range.start.column -= 1;
777 range.start = content.clip_point_utf16(range.start, Bias::Left);
778 }
779 }
780
781 diagnostics_by_group_id
782 .entry(group_id)
783 .or_insert(Vec::new())
784 .push(DiagnosticEntry {
785 range,
786 diagnostic: Diagnostic {
787 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
788 message: diagnostic.message.clone(),
789 group_id,
790 is_primary: false,
791 },
792 });
793 }
794
795 drop(edits_since_save);
796 let new_diagnostics = DiagnosticSet::new(
797 diagnostics_by_group_id
798 .into_values()
799 .flat_map(|mut diagnostics| {
800 let primary = diagnostics
801 .iter_mut()
802 .min_by_key(|entry| entry.diagnostic.severity)
803 .unwrap();
804 primary.diagnostic.is_primary = true;
805 diagnostics
806 }),
807 content,
808 );
809 self.diagnostics = new_diagnostics;
810
811 if let Some(version) = version {
812 let language_server = self.language_server.as_mut().unwrap();
813 let versions_to_delete = language_server
814 .pending_snapshots
815 .range(..version)
816 .map(|(v, _)| *v)
817 .collect::<Vec<_>>();
818 for version in versions_to_delete {
819 language_server.pending_snapshots.remove(&version);
820 }
821 }
822
823 self.diagnostics_update_count += 1;
824 cx.notify();
825 cx.emit(Event::DiagnosticsUpdated);
826 Ok(Operation::UpdateDiagnostics {
827 diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
828 lamport_timestamp: self.lamport_timestamp(),
829 })
830 }
831
832 pub fn diagnostics_in_range<'a, T, O>(
833 &'a self,
834 search_range: Range<T>,
835 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
836 where
837 T: 'a + ToOffset,
838 O: 'a + FromAnchor,
839 {
840 self.diagnostics.range(search_range, self, true)
841 }
842
843 pub fn diagnostic_group<'a, O>(
844 &'a self,
845 group_id: usize,
846 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
847 where
848 O: 'a + FromAnchor,
849 {
850 self.diagnostics.group(group_id, self)
851 }
852
853 pub fn diagnostics_update_count(&self) -> usize {
854 self.diagnostics_update_count
855 }
856
857 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
858 if let Some(indent_columns) = self.compute_autoindents() {
859 let indent_columns = cx.background().spawn(indent_columns);
860 match cx
861 .background()
862 .block_with_timeout(Duration::from_micros(500), indent_columns)
863 {
864 Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
865 Err(indent_columns) => {
866 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
867 let indent_columns = indent_columns.await;
868 this.update(&mut cx, |this, cx| {
869 this.apply_autoindents(indent_columns, cx);
870 });
871 }));
872 }
873 }
874 }
875 }
876
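    /// Computes new indentation levels for the rows touched by the pending
    /// autoindent requests, without blocking the main thread. For each request
    /// it works out what indentation the edited rows were given by the
    /// pre-edit buffer, compares that with suggestions derived from the new
    /// syntax tree, and records a new indent column wherever the two disagree;
    /// rows inserted by the edit always receive fresh suggestions. Returns a
    /// map from row to target indent column.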
877 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
878 let max_rows_between_yields = 100;
879 let snapshot = self.snapshot();
880 if snapshot.language.is_none()
881 || snapshot.tree.is_none()
882 || self.autoindent_requests.is_empty()
883 {
884 return None;
885 }
886
887 let autoindent_requests = self.autoindent_requests.clone();
888 Some(async move {
889 let mut indent_columns = BTreeMap::new();
890 for request in autoindent_requests {
891 let old_to_new_rows = request
892 .edited
893 .iter()
894 .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
895 .zip(
896 request
897 .edited
898 .iter()
899 .map(|anchor| anchor.summary::<Point>(&snapshot).row),
900 )
901 .collect::<BTreeMap<u32, u32>>();
902
903 let mut old_suggestions = HashMap::<u32, u32>::default();
904 let old_edited_ranges =
905 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
906 for old_edited_range in old_edited_ranges {
907 let suggestions = request
908 .before_edit
909 .suggest_autoindents(old_edited_range.clone())
910 .into_iter()
911 .flatten();
912 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
913 let indentation_basis = old_to_new_rows
914 .get(&suggestion.basis_row)
915 .and_then(|from_row| old_suggestions.get(from_row).copied())
916 .unwrap_or_else(|| {
917 request
918 .before_edit
919 .indent_column_for_line(suggestion.basis_row)
920 });
921 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
922 old_suggestions.insert(
923 *old_to_new_rows.get(&old_row).unwrap(),
924 indentation_basis + delta,
925 );
926 }
927 yield_now().await;
928 }
929
                // At this point, `old_suggestions` maps each edited line to the indentation
                // suggested for it by the pre-edit buffer, keyed by the row that line
                // occupies after the edits were applied.
932 let new_edited_row_ranges =
933 contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
934 for new_edited_row_range in new_edited_row_ranges {
935 let suggestions = snapshot
936 .suggest_autoindents(new_edited_row_range.clone())
937 .into_iter()
938 .flatten();
939 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
940 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
941 let new_indentation = indent_columns
942 .get(&suggestion.basis_row)
943 .copied()
944 .unwrap_or_else(|| {
945 snapshot.indent_column_for_line(suggestion.basis_row)
946 })
947 + delta;
948 if old_suggestions
949 .get(&new_row)
950 .map_or(true, |old_indentation| new_indentation != *old_indentation)
951 {
952 indent_columns.insert(new_row, new_indentation);
953 }
954 }
955 yield_now().await;
956 }
957
958 if let Some(inserted) = request.inserted.as_ref() {
959 let inserted_row_ranges = contiguous_ranges(
960 inserted
961 .iter()
962 .map(|range| range.to_point(&snapshot))
963 .flat_map(|range| range.start.row..range.end.row + 1),
964 max_rows_between_yields,
965 );
966 for inserted_row_range in inserted_row_ranges {
967 let suggestions = snapshot
968 .suggest_autoindents(inserted_row_range.clone())
969 .into_iter()
970 .flatten();
971 for (row, suggestion) in inserted_row_range.zip(suggestions) {
972 let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
973 let new_indentation = indent_columns
974 .get(&suggestion.basis_row)
975 .copied()
976 .unwrap_or_else(|| {
977 snapshot.indent_column_for_line(suggestion.basis_row)
978 })
979 + delta;
980 indent_columns.insert(row, new_indentation);
981 }
982 yield_now().await;
983 }
984 }
985 }
986 indent_columns
987 })
988 }
989
990 fn apply_autoindents(
991 &mut self,
992 indent_columns: BTreeMap<u32, u32>,
993 cx: &mut ModelContext<Self>,
994 ) {
995 let selection_set_ids = self
996 .autoindent_requests
997 .drain(..)
998 .flat_map(|req| req.selection_set_ids.clone())
999 .collect::<HashSet<_>>();
1000
1001 self.start_transaction(selection_set_ids.iter().copied())
1002 .unwrap();
1003 for (row, indent_column) in &indent_columns {
1004 self.set_indent_column_for_line(*row, *indent_column, cx);
1005 }
1006
1007 for selection_set_id in &selection_set_ids {
1008 if let Ok(set) = self.selection_set(*selection_set_id) {
1009 let new_selections = set
1010 .selections::<Point>(&*self)
1011 .map(|selection| {
1012 if selection.start.column == 0 {
1013 let delta = Point::new(
1014 0,
1015 indent_columns
1016 .get(&selection.start.row)
1017 .copied()
1018 .unwrap_or(0),
1019 );
1020 if delta.column > 0 {
1021 return Selection {
1022 id: selection.id,
1023 goal: selection.goal,
1024 reversed: selection.reversed,
1025 start: selection.start + delta,
1026 end: selection.end + delta,
1027 };
1028 }
1029 }
1030 selection
1031 })
1032 .collect::<Vec<_>>();
1033 self.update_selection_set(*selection_set_id, &new_selections, cx)
1034 .unwrap();
1035 }
1036 }
1037
1038 self.end_transaction(selection_set_ids.iter().copied(), cx)
1039 .unwrap();
1040 }
1041
1042 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
1043 let current_column = self.indent_column_for_line(row);
1044 if column > current_column {
1045 let offset = Point::new(row, 0).to_offset(&*self);
1046 self.edit(
1047 [offset..offset],
1048 " ".repeat((column - current_column) as usize),
1049 cx,
1050 );
1051 } else if column < current_column {
1052 self.edit(
1053 [Point::new(row, 0)..Point::new(row, current_column - column)],
1054 "",
1055 cx,
1056 );
1057 }
1058 }
1059
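    /// Returns the byte range of the smallest syntax node that strictly
    /// contains the given range, if the buffer has a syntax tree. Passing a
    /// node's exact range yields its parent, so repeated calls walk outward
    /// through the enclosing nodes.
    ///
    /// A sketch of typical use (`selected` is a hypothetical `Range<usize>`
    /// for the current selection, and `select` a hypothetical callback):
    ///
    /// ```ignore
    /// if let Some(ancestor) = buffer.range_for_syntax_ancestor(selected.clone()) {
    ///     select(ancestor);
    /// }
    /// ```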
1060 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
1061 if let Some(tree) = self.syntax_tree() {
1062 let root = tree.root_node();
1063 let range = range.start.to_offset(self)..range.end.to_offset(self);
1064 let mut node = root.descendant_for_byte_range(range.start, range.end);
1065 while node.map_or(false, |n| n.byte_range() == range) {
1066 node = node.unwrap().parent();
1067 }
1068 node.map(|n| n.byte_range())
1069 } else {
1070 None
1071 }
1072 }
1073
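    /// Finds the innermost pair of brackets whose tokens enclose the given
    /// range, using the language's bracket query, and returns the byte ranges
    /// of the open and close tokens.
    ///
    /// A hedged sketch (the offsets and the `highlight` callback are
    /// illustrative):
    ///
    /// ```ignore
    /// // In "fn main() { 1 }", an empty range inside the braces resolves to
    /// // the byte ranges of '{' and '}'.
    /// if let Some((open, close)) = buffer.enclosing_bracket_ranges(12..12) {
    ///     highlight(open, close);
    /// }
    /// ```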
1074 pub fn enclosing_bracket_ranges<T: ToOffset>(
1075 &self,
1076 range: Range<T>,
1077 ) -> Option<(Range<usize>, Range<usize>)> {
1078 let (grammar, tree) = self.grammar().zip(self.syntax_tree())?;
1079 let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
1080 let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;
1081
1082 // Find bracket pairs that *inclusively* contain the given range.
1083 let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
1084 let mut cursor = QueryCursorHandle::new();
1085 let matches = cursor.set_byte_range(range).matches(
1086 &grammar.brackets_query,
1087 tree.root_node(),
1088 TextProvider(self.as_rope()),
1089 );
1090
1091 // Get the ranges of the innermost pair of brackets.
1092 matches
1093 .filter_map(|mat| {
1094 let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
1095 let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
1096 Some((open.byte_range(), close.byte_range()))
1097 })
1098 .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
1099 }
1100
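    /// Computes a line-wise diff between the buffer's current text and
    /// `new_text` on a background thread. The resulting `Diff` records the
    /// version it was computed against, so `apply_diff` can refuse to apply it
    /// if the buffer has changed in the meantime.
    ///
    /// The reload path in `file_updated` uses the pair roughly like this:
    ///
    /// ```ignore
    /// let diff = this
    ///     .read_with(&cx, |buffer, cx| buffer.diff(new_text.into(), cx))
    ///     .await;
    /// this.update(&mut cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```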
1101 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
1102 // TODO: it would be nice to not allocate here.
1103 let old_text = self.text();
1104 let base_version = self.version();
1105 cx.background().spawn(async move {
1106 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
1107 .iter_all_changes()
1108 .map(|c| (c.tag(), c.value().len()))
1109 .collect::<Vec<_>>();
1110 Diff {
1111 base_version,
1112 new_text,
1113 changes,
1114 }
1115 })
1116 }
1117
1118 pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
1119 if self.version == diff.base_version {
1120 self.start_transaction(None).unwrap();
1121 let mut offset = 0;
1122 for (tag, len) in diff.changes {
1123 let range = offset..(offset + len);
1124 match tag {
1125 ChangeTag::Equal => offset += len,
1126 ChangeTag::Delete => self.edit(Some(range), "", cx),
1127 ChangeTag::Insert => {
1128 self.edit(Some(offset..offset), &diff.new_text[range], cx);
1129 offset += len;
1130 }
1131 }
1132 }
1133 self.end_transaction(None, cx).unwrap();
1134 true
1135 } else {
1136 false
1137 }
1138 }
1139
1140 pub fn is_dirty(&self) -> bool {
1141 !self.saved_version.ge(&self.version)
1142 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1143 }
1144
1145 pub fn has_conflict(&self) -> bool {
1146 !self.saved_version.ge(&self.version)
1147 && self
1148 .file
1149 .as_ref()
1150 .map_or(false, |file| file.mtime() > self.saved_mtime)
1151 }
1152
1153 pub fn subscribe(&mut self) -> Subscription {
1154 self.text.subscribe()
1155 }
1156
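    /// Starts a transaction that groups subsequent edits into a single history
    /// transaction. The given selection set ids are recorded with the
    /// transaction; callers in this file pass `None` when no selections are
    /// involved. Every `start_transaction` must be paired with an
    /// `end_transaction`:
    ///
    /// ```ignore
    /// buffer.start_transaction(None)?;
    /// buffer.edit([0..0], "a", cx);
    /// buffer.edit([1..1], "b", cx);
    /// buffer.end_transaction(None, cx)?;
    /// ```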
1157 pub fn start_transaction(
1158 &mut self,
1159 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1160 ) -> Result<()> {
1161 self.start_transaction_at(selection_set_ids, Instant::now())
1162 }
1163
1164 pub(crate) fn start_transaction_at(
1165 &mut self,
1166 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1167 now: Instant,
1168 ) -> Result<()> {
1169 self.text.start_transaction_at(selection_set_ids, now)
1170 }
1171
1172 pub fn end_transaction(
1173 &mut self,
1174 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1175 cx: &mut ModelContext<Self>,
1176 ) -> Result<()> {
1177 self.end_transaction_at(selection_set_ids, Instant::now(), cx)
1178 }
1179
1180 pub(crate) fn end_transaction_at(
1181 &mut self,
1182 selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
1183 now: Instant,
1184 cx: &mut ModelContext<Self>,
1185 ) -> Result<()> {
1186 if let Some(start_version) = self.text.end_transaction_at(selection_set_ids, now) {
1187 let was_dirty = start_version != self.saved_version;
1188 self.did_edit(&start_version, was_dirty, cx);
1189 }
1190 Ok(())
1191 }
1192
1193 fn update_language_server(&mut self) {
1194 let language_server = if let Some(language_server) = self.language_server.as_mut() {
1195 language_server
1196 } else {
1197 return;
1198 };
1199 let abs_path = self
1200 .file
1201 .as_ref()
1202 .map_or(Path::new("/").to_path_buf(), |file| {
1203 file.abs_path().unwrap()
1204 });
1205
1206 let version = post_inc(&mut language_server.next_version);
1207 let snapshot = LanguageServerSnapshot {
1208 buffer_snapshot: self.text.snapshot(),
1209 version,
1210 path: Arc::from(abs_path),
1211 };
1212 language_server
1213 .pending_snapshots
1214 .insert(version, snapshot.clone());
1215 let _ = language_server
1216 .latest_snapshot
1217 .blocking_send(Some(snapshot));
1218 }
1219
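    /// Edits the buffer, replacing each range in `ranges_iter` with
    /// `new_text`. Ranges may be given in any coordinate type implementing
    /// `ToOffset`; an empty range is an insertion, and empty text is a
    /// deletion. Overlapping or touching ranges are coalesced, and edits that
    /// would be no-ops are skipped. Use `edit_with_autoindent` to also
    /// reindent the affected lines.
    ///
    /// ```ignore
    /// // Replace the first three bytes and insert at offset 10 in one call.
    /// buffer.edit([0..3, 10..10], "text", cx);
    /// ```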
1220 pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
1221 where
1222 I: IntoIterator<Item = Range<S>>,
1223 S: ToOffset,
1224 T: Into<String>,
1225 {
1226 self.edit_internal(ranges_iter, new_text, false, cx)
1227 }
1228
1229 pub fn edit_with_autoindent<I, S, T>(
1230 &mut self,
1231 ranges_iter: I,
1232 new_text: T,
1233 cx: &mut ModelContext<Self>,
1234 ) where
1235 I: IntoIterator<Item = Range<S>>,
1236 S: ToOffset,
1237 T: Into<String>,
1238 {
1239 self.edit_internal(ranges_iter, new_text, true, cx)
1240 }
1241
1242 pub fn edit_internal<I, S, T>(
1243 &mut self,
1244 ranges_iter: I,
1245 new_text: T,
1246 autoindent: bool,
1247 cx: &mut ModelContext<Self>,
1248 ) where
1249 I: IntoIterator<Item = Range<S>>,
1250 S: ToOffset,
1251 T: Into<String>,
1252 {
1253 let new_text = new_text.into();
1254
        // Skip edits that are no-ops (an empty range with no new text) and coalesce
        // overlapping or touching ranges.
1256 let mut ranges: Vec<Range<usize>> = Vec::new();
1257 for range in ranges_iter {
1258 let range = range.start.to_offset(self)..range.end.to_offset(self);
1259 if !new_text.is_empty() || !range.is_empty() {
1260 if let Some(prev_range) = ranges.last_mut() {
1261 if prev_range.end >= range.start {
1262 prev_range.end = cmp::max(prev_range.end, range.end);
1263 } else {
1264 ranges.push(range);
1265 }
1266 } else {
1267 ranges.push(range);
1268 }
1269 }
1270 }
1271 if ranges.is_empty() {
1272 return;
1273 }
1274
1275 self.start_transaction(None).unwrap();
1276 self.pending_autoindent.take();
1277 let autoindent_request = if autoindent && self.language.is_some() {
1278 let before_edit = self.snapshot();
1279 let edited = ranges
1280 .iter()
1281 .filter_map(|range| {
1282 let start = range.start.to_point(self);
1283 if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
1284 None
1285 } else {
1286 Some(self.anchor_before(range.start))
1287 }
1288 })
1289 .collect();
1290 Some((before_edit, edited))
1291 } else {
1292 None
1293 };
1294
1295 let first_newline_ix = new_text.find('\n');
1296 let new_text_len = new_text.len();
1297
1298 let edit = self.text.edit(ranges.iter().cloned(), new_text);
1299
1300 if let Some((before_edit, edited)) = autoindent_request {
1301 let mut inserted = None;
1302 if let Some(first_newline_ix) = first_newline_ix {
1303 let mut delta = 0isize;
1304 inserted = Some(
1305 ranges
1306 .iter()
1307 .map(|range| {
1308 let start =
1309 (delta + range.start as isize) as usize + first_newline_ix + 1;
1310 let end = (delta + range.start as isize) as usize + new_text_len;
1311 delta +=
1312 (range.end as isize - range.start as isize) + new_text_len as isize;
1313 self.anchor_before(start)..self.anchor_after(end)
1314 })
1315 .collect(),
1316 );
1317 }
1318
1319 let selection_set_ids = self
1320 .text
1321 .peek_undo_stack()
1322 .unwrap()
1323 .starting_selection_set_ids()
1324 .collect();
1325 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1326 selection_set_ids,
1327 before_edit,
1328 edited,
1329 inserted,
1330 }));
1331 }
1332
1333 self.end_transaction(None, cx).unwrap();
1334 self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
1335 }
1336
1337 fn did_edit(
1338 &mut self,
1339 old_version: &clock::Global,
1340 was_dirty: bool,
1341 cx: &mut ModelContext<Self>,
1342 ) {
1343 if self.edits_since::<usize>(old_version).next().is_none() {
1344 return;
1345 }
1346
1347 self.reparse(cx);
1348 self.update_language_server();
1349
1350 cx.emit(Event::Edited);
1351 if !was_dirty {
1352 cx.emit(Event::Dirtied);
1353 }
1354 cx.notify();
1355 }
1356
1357 fn grammar(&self) -> Option<&Arc<Grammar>> {
1358 self.language.as_ref().and_then(|l| l.grammar.as_ref())
1359 }
1360
1361 pub fn add_selection_set<T: ToOffset>(
1362 &mut self,
1363 selections: &[Selection<T>],
1364 cx: &mut ModelContext<Self>,
1365 ) -> SelectionSetId {
1366 let operation = self.text.add_selection_set(selections);
1367 if let text::Operation::UpdateSelections { set_id, .. } = &operation {
1368 let set_id = *set_id;
1369 cx.notify();
1370 self.send_operation(Operation::Buffer(operation), cx);
1371 set_id
1372 } else {
1373 unreachable!()
1374 }
1375 }
1376
1377 pub fn update_selection_set<T: ToOffset>(
1378 &mut self,
1379 set_id: SelectionSetId,
1380 selections: &[Selection<T>],
1381 cx: &mut ModelContext<Self>,
1382 ) -> Result<()> {
1383 let operation = self.text.update_selection_set(set_id, selections)?;
1384 cx.notify();
1385 self.send_operation(Operation::Buffer(operation), cx);
1386 Ok(())
1387 }
1388
1389 pub fn set_active_selection_set(
1390 &mut self,
1391 set_id: Option<SelectionSetId>,
1392 cx: &mut ModelContext<Self>,
1393 ) -> Result<()> {
1394 let operation = self.text.set_active_selection_set(set_id)?;
1395 self.send_operation(Operation::Buffer(operation), cx);
1396 Ok(())
1397 }
1398
1399 pub fn remove_selection_set(
1400 &mut self,
1401 set_id: SelectionSetId,
1402 cx: &mut ModelContext<Self>,
1403 ) -> Result<()> {
1404 let operation = self.text.remove_selection_set(set_id)?;
1405 cx.notify();
1406 self.send_operation(Operation::Buffer(operation), cx);
1407 Ok(())
1408 }
1409
1410 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1411 &mut self,
1412 ops: I,
1413 cx: &mut ModelContext<Self>,
1414 ) -> Result<()> {
1415 self.pending_autoindent.take();
1416 let was_dirty = self.is_dirty();
1417 let old_version = self.version.clone();
1418 let mut deferred_ops = Vec::new();
1419 let buffer_ops = ops
1420 .into_iter()
1421 .filter_map(|op| match op {
1422 Operation::Buffer(op) => Some(op),
1423 _ => {
1424 if self.can_apply_op(&op) {
1425 self.apply_op(op, cx);
1426 } else {
1427 deferred_ops.push(op);
1428 }
1429 None
1430 }
1431 })
1432 .collect::<Vec<_>>();
1433 self.text.apply_ops(buffer_ops)?;
1434 self.flush_deferred_ops(cx);
1435 self.did_edit(&old_version, was_dirty, cx);
        // Notify even if the buffer wasn't edited, since the applied operations could
        // include a selection update.
1438 cx.notify();
1439 Ok(())
1440 }
1441
1442 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1443 let mut deferred_ops = Vec::new();
1444 for op in self.deferred_ops.drain().iter().cloned() {
1445 if self.can_apply_op(&op) {
1446 self.apply_op(op, cx);
1447 } else {
1448 deferred_ops.push(op);
1449 }
1450 }
1451 self.deferred_ops.insert(deferred_ops);
1452 }
1453
1454 fn can_apply_op(&self, operation: &Operation) -> bool {
1455 match operation {
1456 Operation::Buffer(_) => {
1457 unreachable!("buffer operations should never be applied at this layer")
1458 }
1459 Operation::UpdateDiagnostics { diagnostics, .. } => {
1460 diagnostics.iter().all(|diagnostic| {
1461 self.text.can_resolve(&diagnostic.range.start)
1462 && self.text.can_resolve(&diagnostic.range.end)
1463 })
1464 }
1465 }
1466 }
1467
1468 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1469 match operation {
1470 Operation::Buffer(_) => {
1471 unreachable!("buffer operations should never be applied at this layer")
1472 }
1473 Operation::UpdateDiagnostics { diagnostics, .. } => {
1474 self.apply_diagnostic_update(diagnostics, cx);
1475 }
1476 }
1477 }
1478
1479 fn apply_diagnostic_update(
1480 &mut self,
1481 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
1482 cx: &mut ModelContext<Self>,
1483 ) {
1484 self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
1485 self.diagnostics_update_count += 1;
1486 cx.notify();
1487 }
1488
1489 #[cfg(not(test))]
1490 pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1491 if let Some(file) = &self.file {
1492 file.buffer_updated(self.remote_id(), operation, cx.as_mut());
1493 }
1494 }
1495
1496 #[cfg(test)]
1497 pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
1498 self.operations.push(operation);
1499 }
1500
1501 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1502 self.text.remove_peer(replica_id);
1503 cx.notify();
1504 }
1505
1506 pub fn undo(&mut self, cx: &mut ModelContext<Self>) {
1507 let was_dirty = self.is_dirty();
1508 let old_version = self.version.clone();
1509
1510 for operation in self.text.undo() {
1511 self.send_operation(Operation::Buffer(operation), cx);
1512 }
1513
1514 self.did_edit(&old_version, was_dirty, cx);
1515 }
1516
1517 pub fn redo(&mut self, cx: &mut ModelContext<Self>) {
1518 let was_dirty = self.is_dirty();
1519 let old_version = self.version.clone();
1520
1521 for operation in self.text.redo() {
1522 self.send_operation(Operation::Buffer(operation), cx);
1523 }
1524
1525 self.did_edit(&old_version, was_dirty, cx);
1526 }
1527}
1528
1529#[cfg(any(test, feature = "test-support"))]
1530impl Buffer {
1531 pub fn randomly_edit<T>(
1532 &mut self,
1533 rng: &mut T,
1534 old_range_count: usize,
1535 cx: &mut ModelContext<Self>,
1536 ) where
1537 T: rand::Rng,
1538 {
1539 self.start_transaction(None).unwrap();
1540 self.text.randomly_edit(rng, old_range_count);
1541 self.end_transaction(None, cx).unwrap();
1542 }
1543
1544 pub fn randomly_mutate<T>(&mut self, rng: &mut T, cx: &mut ModelContext<Self>)
1545 where
1546 T: rand::Rng,
1547 {
1548 self.start_transaction(None).unwrap();
1549 self.text.randomly_mutate(rng);
1550 self.end_transaction(None, cx).unwrap();
1551 }
1552}
1553
1554impl Entity for Buffer {
1555 type Event = Event;
1556
1557 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
1558 if let Some(file) = self.file.as_ref() {
1559 file.buffer_removed(self.remote_id(), cx);
1560 }
1561 }
1562}
1563
1564impl Deref for Buffer {
1565 type Target = TextBuffer;
1566
1567 fn deref(&self) -> &Self::Target {
1568 &self.text
1569 }
1570}
1571
1572impl BufferSnapshot {
1573 fn suggest_autoindents<'a>(
1574 &'a self,
1575 row_range: Range<u32>,
1576 ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
1577 let mut query_cursor = QueryCursorHandle::new();
1578 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1579 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1580
1581 // Get the "indentation ranges" that intersect this row range.
1582 let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
1583 let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
1584 query_cursor.set_point_range(
1585 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
1586 ..Point::new(row_range.end, 0).to_ts_point(),
1587 );
1588 let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
1589 for mat in query_cursor.matches(
1590 &grammar.indents_query,
1591 tree.root_node(),
1592 TextProvider(self.as_rope()),
1593 ) {
1594 let mut node_kind = "";
1595 let mut start: Option<Point> = None;
1596 let mut end: Option<Point> = None;
1597 for capture in mat.captures {
1598 if Some(capture.index) == indent_capture_ix {
1599 node_kind = capture.node.kind();
1600 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1601 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1602 } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position()));
1604 }
1605 }
1606
1607 if let Some((start, end)) = start.zip(end) {
1608 if start.row == end.row {
1609 continue;
1610 }
1611
1612 let range = start..end;
1613 match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
1614 Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
1615 Ok(ix) => {
1616 let prev_range = &mut indentation_ranges[ix];
1617 prev_range.0.end = prev_range.0.end.max(range.end);
1618 }
1619 }
1620 }
1621 }
1622
1623 let mut prev_row = prev_non_blank_row.unwrap_or(0);
1624 Some(row_range.map(move |row| {
1625 let row_start = Point::new(row, self.indent_column_for_line(row));
1626
1627 let mut indent_from_prev_row = false;
1628 let mut outdent_to_row = u32::MAX;
1629 for (range, _node_kind) in &indentation_ranges {
1630 if range.start.row >= row {
1631 break;
1632 }
1633
1634 if range.start.row == prev_row && range.end > row_start {
1635 indent_from_prev_row = true;
1636 }
1637 if range.end.row >= prev_row && range.end <= row_start {
1638 outdent_to_row = outdent_to_row.min(range.start.row);
1639 }
1640 }
1641
1642 let suggestion = if outdent_to_row == prev_row {
1643 IndentSuggestion {
1644 basis_row: prev_row,
1645 indent: false,
1646 }
1647 } else if indent_from_prev_row {
1648 IndentSuggestion {
1649 basis_row: prev_row,
1650 indent: true,
1651 }
1652 } else if outdent_to_row < prev_row {
1653 IndentSuggestion {
1654 basis_row: outdent_to_row,
1655 indent: false,
1656 }
1657 } else {
1658 IndentSuggestion {
1659 basis_row: prev_row,
1660 indent: false,
1661 }
1662 };
1663
1664 prev_row = row;
1665 suggestion
1666 }))
1667 } else {
1668 None
1669 }
1670 }
1671
1672 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
1673 while row > 0 {
1674 row -= 1;
1675 if !self.is_line_blank(row) {
1676 return Some(row);
1677 }
1678 }
1679 None
1680 }
1681
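    /// Returns an iterator over the text in `range`, split into chunks at
    /// highlight and diagnostic boundaries. When a `theme` is given, each
    /// chunk carries the resolved highlight style and the severity of the most
    /// severe diagnostic overlapping it; without a theme, plain text chunks
    /// are produced.
    ///
    /// A sketch of typical use (`render` and `theme` are hypothetical):
    ///
    /// ```ignore
    /// for chunk in snapshot.chunks(0..snapshot.len(), Some(&theme)) {
    ///     render(chunk.text, chunk.highlight_style, chunk.diagnostic);
    /// }
    /// ```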
1682 pub fn chunks<'a, T: ToOffset>(
1683 &'a self,
1684 range: Range<T>,
1685 theme: Option<&'a SyntaxTheme>,
1686 ) -> BufferChunks<'a> {
1687 let range = range.start.to_offset(self)..range.end.to_offset(self);
1688
1689 let mut highlights = None;
1690 let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
1691 if let Some(theme) = theme {
1692 for entry in self
1693 .diagnostics
1694 .range::<_, usize>(range.clone(), self, true)
1695 {
1696 diagnostic_endpoints.push(DiagnosticEndpoint {
1697 offset: entry.range.start,
1698 is_start: true,
1699 severity: entry.diagnostic.severity,
1700 });
1701 diagnostic_endpoints.push(DiagnosticEndpoint {
1702 offset: entry.range.end,
1703 is_start: false,
1704 severity: entry.diagnostic.severity,
1705 });
1706 }
1707 diagnostic_endpoints
1708 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
1709
1710 if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
1711 let mut query_cursor = QueryCursorHandle::new();
1712
1713 // TODO - add a Tree-sitter API to remove the need for this.
1714 let cursor = unsafe {
1715 std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
1716 };
1717 let captures = cursor.set_byte_range(range.clone()).captures(
1718 &grammar.highlights_query,
1719 tree.root_node(),
1720 TextProvider(self.text.as_rope()),
1721 );
1722 highlights = Some(BufferChunkHighlights {
1723 captures,
1724 next_capture: None,
1725 stack: Default::default(),
1726 highlight_map: grammar.highlight_map(),
1727 _query_cursor: query_cursor,
1728 theme,
1729 })
1730 }
1731 }
1732
1733 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
1734 let chunks = self.text.as_rope().chunks_in_range(range.clone());
1735
1736 BufferChunks {
1737 range,
1738 chunks,
1739 diagnostic_endpoints,
1740 error_depth: 0,
1741 warning_depth: 0,
1742 information_depth: 0,
1743 hint_depth: 0,
1744 highlights,
1745 }
1746 }
1747
1748 fn grammar(&self) -> Option<&Arc<Grammar>> {
1749 self.language
1750 .as_ref()
1751 .and_then(|language| language.grammar.as_ref())
1752 }
1753
1754 pub fn diagnostics_update_count(&self) -> usize {
1755 self.diagnostics_update_count
1756 }
1757
1758 pub fn parse_count(&self) -> usize {
1759 self.parse_count
1760 }
1761}
1762
1763impl Clone for BufferSnapshot {
1764 fn clone(&self) -> Self {
1765 Self {
1766 text: self.text.clone(),
1767 tree: self.tree.clone(),
1768 diagnostics: self.diagnostics.clone(),
1769 diagnostics_update_count: self.diagnostics_update_count,
1770 is_parsing: self.is_parsing,
1771 language: self.language.clone(),
1772 parse_count: self.parse_count,
1773 }
1774 }
1775}
1776
1777impl Deref for BufferSnapshot {
1778 type Target = text::BufferSnapshot;
1779
1780 fn deref(&self) -> &Self::Target {
1781 &self.text
1782 }
1783}
1784
1785impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
1786 type I = ByteChunks<'a>;
1787
1788 fn text(&mut self, node: tree_sitter::Node) -> Self::I {
1789 ByteChunks(self.0.chunks_in_range(node.byte_range()))
1790 }
1791}
1792
1793struct ByteChunks<'a>(rope::Chunks<'a>);
1794
1795impl<'a> Iterator for ByteChunks<'a> {
1796 type Item = &'a [u8];
1797
1798 fn next(&mut self) -> Option<Self::Item> {
1799 self.0.next().map(str::as_bytes)
1800 }
1801}
1802
1803unsafe impl<'a> Send for BufferChunks<'a> {}
1804
1805impl<'a> BufferChunks<'a> {
1806 pub fn seek(&mut self, offset: usize) {
1807 self.range.start = offset;
1808 self.chunks.seek(self.range.start);
1809 if let Some(highlights) = self.highlights.as_mut() {
1810 highlights
1811 .stack
1812 .retain(|(end_offset, _)| *end_offset > offset);
1813 if let Some((mat, capture_ix)) = &highlights.next_capture {
1814 let capture = mat.captures[*capture_ix as usize];
1815 if offset >= capture.node.start_byte() {
1816 let next_capture_end = capture.node.end_byte();
1817 if offset < next_capture_end {
1818 highlights.stack.push((
1819 next_capture_end,
1820 highlights.highlight_map.get(capture.index),
1821 ));
1822 }
1823 highlights.next_capture.take();
1824 }
1825 }
1826 highlights.captures.set_byte_range(self.range.clone());
1827 }
1828 }
1829
1830 pub fn offset(&self) -> usize {
1831 self.range.start
1832 }
1833
1834 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
1835 let depth = match endpoint.severity {
1836 DiagnosticSeverity::ERROR => &mut self.error_depth,
1837 DiagnosticSeverity::WARNING => &mut self.warning_depth,
1838 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
1839 DiagnosticSeverity::HINT => &mut self.hint_depth,
1840 _ => return,
1841 };
1842 if endpoint.is_start {
1843 *depth += 1;
1844 } else {
1845 *depth -= 1;
1846 }
1847 }
1848
1849 fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
1850 if self.error_depth > 0 {
1851 Some(DiagnosticSeverity::ERROR)
1852 } else if self.warning_depth > 0 {
1853 Some(DiagnosticSeverity::WARNING)
1854 } else if self.information_depth > 0 {
1855 Some(DiagnosticSeverity::INFORMATION)
1856 } else if self.hint_depth > 0 {
1857 Some(DiagnosticSeverity::HINT)
1858 } else {
1859 None
1860 }
1861 }
1862}
1863
1864impl<'a> Iterator for BufferChunks<'a> {
1865 type Item = Chunk<'a>;
1866
1867 fn next(&mut self) -> Option<Self::Item> {
1868 let mut next_capture_start = usize::MAX;
1869 let mut next_diagnostic_endpoint = usize::MAX;
1870
1871 if let Some(highlights) = self.highlights.as_mut() {
1872 while let Some((parent_capture_end, _)) = highlights.stack.last() {
1873 if *parent_capture_end <= self.range.start {
1874 highlights.stack.pop();
1875 } else {
1876 break;
1877 }
1878 }
1879
1880 if highlights.next_capture.is_none() {
1881 highlights.next_capture = highlights.captures.next();
1882 }
1883
1884 while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
1885 let capture = mat.captures[*capture_ix as usize];
1886 if self.range.start < capture.node.start_byte() {
1887 next_capture_start = capture.node.start_byte();
1888 break;
1889 } else {
1890 let highlight_id = highlights.highlight_map.get(capture.index);
1891 highlights
1892 .stack
1893 .push((capture.node.end_byte(), highlight_id));
1894 highlights.next_capture = highlights.captures.next();
1895 }
1896 }
1897 }
1898
1899 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
1900 if endpoint.offset <= self.range.start {
1901 self.update_diagnostic_depths(endpoint);
1902 self.diagnostic_endpoints.next();
1903 } else {
1904 next_diagnostic_endpoint = endpoint.offset;
1905 break;
1906 }
1907 }
1908
1909 if let Some(chunk) = self.chunks.peek() {
1910 let chunk_start = self.range.start;
1911 let mut chunk_end = (self.chunks.offset() + chunk.len())
1912 .min(next_capture_start)
1913 .min(next_diagnostic_endpoint);
1914 let mut highlight_style = None;
1915 if let Some(highlights) = self.highlights.as_ref() {
1916 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
1917 chunk_end = chunk_end.min(*parent_capture_end);
1918 highlight_style = parent_highlight_id.style(highlights.theme);
1919 }
1920 }
1921
1922 let slice =
1923 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
1924 self.range.start = chunk_end;
1925 if self.range.start == self.chunks.offset() + chunk.len() {
1926 self.chunks.next().unwrap();
1927 }
1928
1929 Some(Chunk {
1930 text: slice,
1931 highlight_style,
1932 diagnostic: self.current_diagnostic_severity(),
1933 })
1934 } else {
1935 None
1936 }
1937 }
1938}
1939
1940impl QueryCursorHandle {
1941 fn new() -> Self {
1942 QueryCursorHandle(Some(
1943 QUERY_CURSORS
1944 .lock()
1945 .pop()
1946 .unwrap_or_else(|| QueryCursor::new()),
1947 ))
1948 }
1949}
1950
1951impl Deref for QueryCursorHandle {
1952 type Target = QueryCursor;
1953
1954 fn deref(&self) -> &Self::Target {
1955 self.0.as_ref().unwrap()
1956 }
1957}
1958
1959impl DerefMut for QueryCursorHandle {
1960 fn deref_mut(&mut self) -> &mut Self::Target {
1961 self.0.as_mut().unwrap()
1962 }
1963}
1964
1965impl Drop for QueryCursorHandle {
1966 fn drop(&mut self) {
1967 let mut cursor = self.0.take().unwrap();
1968 cursor.set_byte_range(0..usize::MAX);
1969 cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
1970 QUERY_CURSORS.lock().push(cursor)
1971 }
1972}
1973
1974trait ToTreeSitterPoint {
1975 fn to_ts_point(self) -> tree_sitter::Point;
1976 fn from_ts_point(point: tree_sitter::Point) -> Self;
1977}
1978
1979impl ToTreeSitterPoint for Point {
1980 fn to_ts_point(self) -> tree_sitter::Point {
1981 tree_sitter::Point::new(self.row as usize, self.column as usize)
1982 }
1983
1984 fn from_ts_point(point: tree_sitter::Point) -> Self {
1985 Point::new(point.row as u32, point.column as u32)
1986 }
1987}
1988
1989trait ToPointUtf16 {
1990 fn to_point_utf16(self) -> PointUtf16;
1991}
1992
1993impl ToPointUtf16 for lsp::Position {
1994 fn to_point_utf16(self) -> PointUtf16 {
1995 PointUtf16::new(self.line, self.character)
1996 }
1997}
1998
1999impl operation_queue::Operation for Operation {
2000 fn lamport_timestamp(&self) -> clock::Lamport {
2001 match self {
2002 Operation::Buffer(_) => {
2003 unreachable!("buffer operations should never be deferred at this layer")
2004 }
2005 Operation::UpdateDiagnostics {
2006 lamport_timestamp, ..
2007 } => *lamport_timestamp,
2008 }
2009 }
2010}
2011
2012fn diagnostic_ranges<'a>(
2013 diagnostic: &'a lsp::Diagnostic,
2014 abs_path: Option<&'a Path>,
2015) -> impl 'a + Iterator<Item = Range<PointUtf16>> {
2016 diagnostic
2017 .related_information
2018 .iter()
2019 .flatten()
2020 .filter_map(move |info| {
2021 if info.location.uri.to_file_path().ok()? == abs_path? {
2022 let info_start = PointUtf16::new(
2023 info.location.range.start.line,
2024 info.location.range.start.character,
2025 );
2026 let info_end = PointUtf16::new(
2027 info.location.range.end.line,
2028 info.location.range.end.character,
2029 );
2030 Some(info_start..info_end)
2031 } else {
2032 None
2033 }
2034 })
2035 .chain(Some(
2036 diagnostic.range.start.to_point_utf16()..diagnostic.range.end.to_point_utf16(),
2037 ))
2038}
2039
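/// Groups an ascending sequence of row numbers into contiguous ranges,
/// splitting any run that would grow longer than `max_len`.
///
/// ```ignore
/// // Runs of consecutive values are merged, but each range is capped at
/// // `max_len` rows.
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
/// ```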
2040pub fn contiguous_ranges(
2041 values: impl Iterator<Item = u32>,
2042 max_len: usize,
2043) -> impl Iterator<Item = Range<u32>> {
2044 let mut values = values.into_iter();
2045 let mut current_range: Option<Range<u32>> = None;
2046 std::iter::from_fn(move || loop {
2047 if let Some(value) = values.next() {
2048 if let Some(range) = &mut current_range {
2049 if value == range.end && range.len() < max_len {
2050 range.end += 1;
2051 continue;
2052 }
2053 }
2054
2055 let prev_range = current_range.clone();
2056 current_range = Some(value..(value + 1));
2057 if prev_range.is_some() {
2058 return prev_range;
2059 }
2060 } else {
2061 return current_range.take();
2062 }
2063 })
2064}