use crate::diagnostic_set::DiagnosticEntry;
pub use crate::{
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
    PLAIN_TEXT,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
use lsp::LanguageServer;
use parking_lot::Mutex;
use postage::{prelude::Stream, sink::Sink, watch};
use similar::{ChangeTag, TextDiff};
use smol::future::yield_now;
use std::{
    any::Any,
    cell::RefCell,
    cmp,
    collections::{BTreeMap, HashMap, HashSet},
    ffi::OsString,
    future::Future,
    iter::{Iterator, Peekable},
    ops::{Deref, DerefMut, Range},
    path::{Path, PathBuf},
    str,
    sync::Arc,
    time::{Duration, Instant, SystemTime, UNIX_EPOCH},
    vec,
};
use text::operation_queue::OperationQueue;
pub use text::{Buffer as TextBuffer, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
use util::{post_inc, TryFutureExt as _};

#[cfg(any(test, feature = "test-support"))]
pub use tree_sitter_rust;

pub use lsp::DiagnosticSeverity;

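// Each thread keeps a single reusable Tree-sitter parser rather than
// constructing a new one for every parse.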
thread_local! {
    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}

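// A pool of Tree-sitter query cursors. `QueryCursorHandle` checks cursors out of
// this pool and returns them on drop so they can be reused.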
lazy_static! {
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}

// TODO - Make this configurable
const INDENT_SIZE: u32 = 4;

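/// A text buffer augmented with a language, a syntax tree, diagnostics,
/// autoindent state, and an optional backing file and language server.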
pub struct Buffer {
    text: TextBuffer,
    file: Option<Box<dyn File>>,
    saved_version: clock::Global,
    saved_mtime: SystemTime,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_tree: Mutex<Option<SyntaxTree>>,
    parsing_in_background: bool,
    parse_count: usize,
    diagnostics: DiagnosticSet,
    diagnostics_update_count: usize,
    language_server: Option<LanguageServerState>,
    deferred_ops: OperationQueue<Operation>,
    #[cfg(test)]
    pub(crate) operations: Vec<Operation>,
}

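/// An immutable view of a buffer's text, syntax tree, language, and diagnostics
/// as of a particular moment.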
pub struct Snapshot {
    text: text::Snapshot,
    tree: Option<Tree>,
    diagnostics: DiagnosticSet,
    diagnostics_update_count: usize,
    is_parsing: bool,
    language: Option<Arc<Language>>,
    parse_count: usize,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Diagnostic {
    pub severity: DiagnosticSeverity,
    pub message: String,
    pub group_id: usize,
    pub is_primary: bool,
}

struct LanguageServerState {
    server: Arc<LanguageServer>,
    latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
    pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
    next_version: usize,
    _maintain_server: Task<Option<()>>,
}

#[derive(Clone)]
struct LanguageServerSnapshot {
    buffer_snapshot: text::Snapshot,
    version: usize,
    path: Arc<Path>,
}

#[derive(Clone, Debug)]
pub enum Operation {
    Buffer(text::Operation),
    UpdateDiagnostics {
        diagnostics: Arc<[DiagnosticEntry]>,
        lamport_timestamp: clock::Lamport,
    },
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Event {
    Edited,
    Dirtied,
    Saved,
    FileHandleChanged,
    Reloaded,
    Reparsed,
    DiagnosticsUpdated,
    Closed,
}

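/// An abstraction over the file backing a buffer: it exposes path and mtime
/// metadata, performs loads and saves, and relays buffer operations when the
/// buffer changes or is removed.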
pub trait File {
    fn worktree_id(&self) -> usize;

    fn entry_id(&self) -> Option<usize>;

    fn mtime(&self) -> SystemTime;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<Path>;

    /// Returns the absolute path of this file.
    fn abs_path(&self) -> Option<PathBuf>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self) -> PathBuf;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name(&self) -> Option<OsString>;

    fn is_deleted(&self) -> bool;

    fn save(
        &self,
        buffer_id: u64,
        text: Rope,
        version: clock::Global,
        cx: &mut MutableAppContext,
    ) -> Task<Result<(clock::Global, SystemTime)>>;

    fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;

    fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);

    fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);

    fn boxed_clone(&self) -> Box<dyn File>;

    fn as_any(&self) -> &dyn Any;
}

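/// A query cursor checked out of the `QUERY_CURSORS` pool; it is returned to the
/// pool when dropped.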
struct QueryCursorHandle(Option<QueryCursor>);

#[derive(Clone)]
struct SyntaxTree {
    tree: Tree,
    version: clock::Global,
}

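/// State captured when an edit requests autoindentation: the selections that
/// drove the edit, a snapshot of the buffer from before the edit, and anchors to
/// the edited and newly inserted ranges.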
#[derive(Clone)]
struct AutoindentRequest {
    selection_set_ids: HashSet<SelectionSetId>,
    before_edit: Snapshot,
    edited: Vec<Anchor>,
    inserted: Option<Vec<Range<Anchor>>>,
}

#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    indent: bool,
}

struct TextProvider<'a>(&'a Rope);

struct Highlights<'a> {
    captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
    next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
    stack: Vec<(usize, HighlightId)>,
    highlight_map: HighlightMap,
    theme: &'a SyntaxTheme,
    _query_cursor: QueryCursorHandle,
}

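/// An iterator over chunks of a buffer's text within a byte range, annotating
/// each chunk with its syntax highlight style and the most severe diagnostic
/// covering it.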
pub struct Chunks<'a> {
    range: Range<usize>,
    chunks: rope::Chunks<'a>,
    diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    highlights: Option<Highlights<'a>>,
}

#[derive(Clone, Copy, Debug, Default)]
pub struct Chunk<'a> {
    pub text: &'a str,
    pub highlight_style: Option<HighlightStyle>,
    pub diagnostic: Option<DiagnosticSeverity>,
}

pub(crate) struct Diff {
    base_version: clock::Global,
    new_text: Arc<str>,
    changes: Vec<(ChangeTag, usize)>,
}

#[derive(Clone, Copy)]
struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    severity: DiagnosticSeverity,
}

impl Buffer {
    pub fn new<T: Into<Arc<str>>>(
        replica_id: ReplicaId,
        base_text: T,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new(
                replica_id,
                cx.model_id() as u64,
                History::new(base_text.into()),
            ),
            None,
        )
    }

    pub fn from_file<T: Into<Arc<str>>>(
        replica_id: ReplicaId,
        base_text: T,
        file: Box<dyn File>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new(
                replica_id,
                cx.model_id() as u64,
                History::new(base_text.into()),
            ),
            Some(file),
        )
    }

    pub fn from_proto(
        replica_id: ReplicaId,
        message: proto::Buffer,
        file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Self> {
        let mut buffer =
            text::Buffer::new(replica_id, message.id, History::new(message.content.into()));
        let ops = message
            .history
            .into_iter()
            .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op)));
        buffer.apply_ops(ops)?;
        for set in message.selections {
            let set = proto::deserialize_selection_set(set);
            buffer.add_raw_selection_set(set.id, set);
        }
        let mut this = Self::build(buffer, file);
        this.apply_diagnostic_update(
            Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
            cx,
        );

        Ok(this)
    }

    pub fn to_proto(&self) -> proto::Buffer {
        proto::Buffer {
            id: self.remote_id(),
            content: self.text.base_text().to_string(),
            history: self
                .text
                .history()
                .map(proto::serialize_edit_operation)
                .collect(),
            selections: self
                .selection_sets()
                .map(|(_, set)| proto::serialize_selection_set(set))
                .collect(),
            diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
        }
    }

    pub fn with_language(
        mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        self.set_language(language, language_server, cx);
        self
    }

    fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
        let saved_mtime;
        if let Some(file) = file.as_ref() {
            saved_mtime = file.mtime();
        } else {
            saved_mtime = UNIX_EPOCH;
        }

        Self {
            saved_mtime,
            saved_version: buffer.version(),
            text: buffer,
            file,
            syntax_tree: Mutex::new(None),
            parsing_in_background: false,
            parse_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            autoindent_requests: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            diagnostics: Default::default(),
            diagnostics_update_count: 0,
            language_server: None,
            deferred_ops: OperationQueue::new(),
            #[cfg(test)]
            operations: Default::default(),
        }
    }

    pub fn snapshot(&self) -> Snapshot {
        Snapshot {
            text: self.text.snapshot(),
            tree: self.syntax_tree(),
            diagnostics: self.diagnostics.clone(),
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.parsing_in_background,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }

    pub fn file(&self) -> Option<&dyn File> {
        self.file.as_deref()
    }

    pub fn save(
        &mut self,
        cx: &mut ModelContext<Self>,
    ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
        let file = self
            .file
            .as_ref()
            .ok_or_else(|| anyhow!("buffer has no file"))?;
        let text = self.as_rope().clone();
        let version = self.version();
        let save = file.save(self.remote_id(), text, version, cx.as_mut());
        Ok(cx.spawn(|this, mut cx| async move {
            let (version, mtime) = save.await?;
            this.update(&mut cx, |this, cx| {
                this.did_save(version.clone(), mtime, None, cx);
            });
            Ok((version, mtime))
        }))
    }

    pub fn set_language(
        &mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<lsp::LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.language = language;
        self.language_server = if let Some(server) = language_server {
            let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
            Some(LanguageServerState {
                latest_snapshot: latest_snapshot_tx,
                pending_snapshots: Default::default(),
                next_version: 0,
                server: server.clone(),
                _maintain_server: cx.background().spawn(
                    async move {
                        let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
                        while let Some(snapshot) = latest_snapshot_rx.recv().await {
                            if let Some(snapshot) = snapshot {
                                let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
                                if let Some(prev_snapshot) = prev_snapshot {
                                    let changes = lsp::DidChangeTextDocumentParams {
                                        text_document: lsp::VersionedTextDocumentIdentifier::new(
                                            uri,
                                            snapshot.version as i32,
                                        ),
                                        content_changes: snapshot
                                            .buffer_snapshot
                                            .edits_since::<(PointUtf16, usize)>(
                                                prev_snapshot.buffer_snapshot.version(),
                                            )
                                            .map(|edit| {
                                                let edit_start = edit.new.start.0;
                                                let edit_end = edit_start
                                                    + (edit.old.end.0 - edit.old.start.0);
                                                let new_text = snapshot
                                                    .buffer_snapshot
                                                    .text_for_range(
                                                        edit.new.start.1..edit.new.end.1,
                                                    )
                                                    .collect();
                                                lsp::TextDocumentContentChangeEvent {
                                                    range: Some(lsp::Range::new(
                                                        lsp::Position::new(
                                                            edit_start.row,
                                                            edit_start.column,
                                                        ),
                                                        lsp::Position::new(
                                                            edit_end.row,
                                                            edit_end.column,
                                                        ),
                                                    )),
                                                    range_length: None,
                                                    text: new_text,
                                                }
                                            })
                                            .collect(),
                                    };
                                    server
                                        .notify::<lsp::notification::DidChangeTextDocument>(changes)
                                        .await?;
                                } else {
                                    server
                                        .notify::<lsp::notification::DidOpenTextDocument>(
                                            lsp::DidOpenTextDocumentParams {
                                                text_document: lsp::TextDocumentItem::new(
                                                    uri,
                                                    Default::default(),
                                                    snapshot.version as i32,
                                                    snapshot.buffer_snapshot.text().to_string(),
                                                ),
                                            },
                                        )
                                        .await?;
                                }

                                prev_snapshot = Some(snapshot);
                            }
                        }
                        Ok(())
                    }
                    .log_err(),
                ),
            })
        } else {
            None
        };

        self.reparse(cx);
        self.update_language_server();
    }

    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: SystemTime,
        new_file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.saved_mtime = mtime;
        self.saved_version = version;
        if let Some(new_file) = new_file {
            self.file = Some(new_file);
        }
        if let Some(state) = &self.language_server {
            cx.background()
                .spawn(
                    state
                        .server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: lsp::TextDocumentIdentifier {
                                    uri: lsp::Url::from_file_path(
                                        self.file.as_ref().unwrap().abs_path().unwrap(),
                                    )
                                    .unwrap(),
                                },
                                text: None,
                            },
                        ),
                )
                .detach()
        }
        cx.emit(Event::Saved);
    }

    pub fn file_updated(
        &mut self,
        new_file: Box<dyn File>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<()>> {
        let old_file = self.file.as_ref()?;
        let mut file_changed = false;
        let mut task = None;

        if new_file.path() != old_file.path() {
            file_changed = true;
        }

        if new_file.is_deleted() {
            if !old_file.is_deleted() {
                file_changed = true;
                if !self.is_dirty() {
                    cx.emit(Event::Dirtied);
                }
            }
        } else {
            let new_mtime = new_file.mtime();
            if new_mtime != old_file.mtime() {
                file_changed = true;

                if !self.is_dirty() {
                    task = Some(cx.spawn(|this, mut cx| {
                        async move {
                            let new_text = this.read_with(&cx, |this, cx| {
                                this.file.as_ref().and_then(|file| file.load_local(cx))
                            });
                            if let Some(new_text) = new_text {
                                let new_text = new_text.await?;
                                let diff = this
                                    .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
                                    .await;
                                this.update(&mut cx, |this, cx| {
                                    if this.apply_diff(diff, cx) {
                                        this.saved_version = this.version();
                                        this.saved_mtime = new_mtime;
                                        cx.emit(Event::Reloaded);
                                    }
                                });
                            }
                            Ok(())
                        }
                        .log_err()
                        .map(drop)
                    }));
                }
            }
        }

        if file_changed {
            cx.emit(Event::FileHandleChanged);
        }
        self.file = Some(new_file);
        task
    }

    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }

    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    pub fn parse_count(&self) -> usize {
        self.parse_count
    }

    pub(crate) fn syntax_tree(&self) -> Option<Tree> {
        if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
            self.interpolate_tree(syntax_tree);
            Some(syntax_tree.tree.clone())
        } else {
            None
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.parsing_in_background
    }

    #[cfg(test)]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }

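    // Reparses the buffer with Tree-sitter. If the parse finishes within
    // `sync_parse_timeout` it is applied synchronously and this returns true;
    // otherwise the parse continues in the background and is applied when it
    // completes.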
    fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
        if self.parsing_in_background {
            return false;
        }

        if let Some(grammar) = self.grammar().cloned() {
            let old_tree = self.syntax_tree();
            let text = self.as_rope().clone();
            let parsed_version = self.version();
            let parse_task = cx.background().spawn({
                let grammar = grammar.clone();
                async move { Self::parse_text(&text, old_tree, &grammar) }
            });

            match cx
                .background()
                .block_with_timeout(self.sync_parse_timeout, parse_task)
            {
                Ok(new_tree) => {
                    self.did_finish_parsing(new_tree, parsed_version, cx);
                    return true;
                }
                Err(parse_task) => {
                    self.parsing_in_background = true;
                    cx.spawn(move |this, mut cx| async move {
                        let new_tree = parse_task.await;
                        this.update(&mut cx, move |this, cx| {
                            let grammar_changed = this
                                .grammar()
                                .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
                            let parse_again = this.version.gt(&parsed_version) || grammar_changed;
                            this.parsing_in_background = false;
                            this.did_finish_parsing(new_tree, parsed_version, cx);

                            if parse_again && this.reparse(cx) {
                                return;
                            }
                        });
                    })
                    .detach();
                }
            }
        }
        false
    }

    fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
        PARSER.with(|parser| {
            let mut parser = parser.borrow_mut();
            parser
                .set_language(grammar.ts_language)
                .expect("incompatible grammar");
            let mut chunks = text.chunks_in_range(0..text.len());
            let tree = parser
                .parse_with(
                    &mut move |offset, _| {
                        chunks.seek(offset);
                        chunks.next().unwrap_or("").as_bytes()
                    },
                    old_tree.as_ref(),
                )
                .unwrap();
            tree
        })
    }

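    // Applies all edits made since `tree.version` to the old syntax tree so that
    // its byte and point ranges stay consistent with the current text.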
    fn interpolate_tree(&self, tree: &mut SyntaxTree) {
        for edit in self.edits_since::<(usize, Point)>(&tree.version) {
            let (bytes, lines) = edit.flatten();
            tree.tree.edit(&InputEdit {
                start_byte: bytes.new.start,
                old_end_byte: bytes.new.start + bytes.old.len(),
                new_end_byte: bytes.new.end,
                start_position: lines.new.start.to_ts_point(),
                old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
                    .to_ts_point(),
                new_end_position: lines.new.end.to_ts_point(),
            });
        }
        tree.version = self.version();
    }

    fn did_finish_parsing(
        &mut self,
        tree: Tree,
        version: clock::Global,
        cx: &mut ModelContext<Self>,
    ) {
        self.parse_count += 1;
        *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
        self.request_autoindent(cx);
        cx.emit(Event::Reparsed);
        cx.notify();
    }

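    /// Replaces the buffer's diagnostics with a new set reported by the language
    /// server. Diagnostics from disk-based sources are mapped through the edits
    /// made since the last save (entries overlapping an unsaved edit are dropped),
    /// related diagnostics are grouped, the most severe entry of each group
    /// becomes its primary, and empty ranges are widened to cover at least one
    /// character where possible.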
    pub fn update_diagnostics(
        &mut self,
        version: Option<i32>,
        mut diagnostics: Vec<lsp::Diagnostic>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Operation> {
        diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));

        let version = version.map(|version| version as usize);
        let content = if let Some(version) = version {
            let language_server = self.language_server.as_mut().unwrap();
            let snapshot = language_server
                .pending_snapshots
                .get(&version)
                .ok_or_else(|| anyhow!("missing snapshot"))?;
            &snapshot.buffer_snapshot
        } else {
            self.deref()
        };
        let abs_path = self.file.as_ref().and_then(|f| f.abs_path());

        let empty_set = HashSet::new();
        let disk_based_sources = self
            .language
            .as_ref()
            .and_then(|language| language.disk_based_diagnostic_sources())
            .unwrap_or(&empty_set);

        let mut edits_since_save = content
            .edits_since::<PointUtf16>(&self.saved_version)
            .peekable();
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        let mut group_ids_by_diagnostic_range = HashMap::new();
        let mut diagnostics_by_group_id = HashMap::new();
        let mut next_group_id = 0;
        'outer: for diagnostic in &diagnostics {
            let mut start = diagnostic.range.start.to_point_utf16();
            let mut end = diagnostic.range.end.to_point_utf16();
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref();
            let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
                .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
                .copied()
                .unwrap_or_else(|| {
                    let group_id = post_inc(&mut next_group_id);
                    for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
                        group_ids_by_diagnostic_range.insert((source, code, range), group_id);
                    }
                    group_id
                });

            if diagnostic
                .source
                .as_ref()
                .map_or(false, |source| disk_based_sources.contains(source))
            {
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                start = last_edit_new_end + (start - last_edit_old_end);
                end = last_edit_new_end + (end - last_edit_old_end);
            }

            let mut range = content.clip_point_utf16(start, Bias::Left)
                ..content.clip_point_utf16(end, Bias::Right);
            if range.start == range.end {
                range.end.column += 1;
                range.end = content.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = content.clip_point_utf16(range.start, Bias::Left);
                }
            }

            diagnostics_by_group_id
                .entry(group_id)
                .or_insert(Vec::new())
                .push((
                    range,
                    Diagnostic {
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: false,
                    },
                ));
        }

        drop(edits_since_save);
        self.diagnostics
            .reset(
                diagnostics_by_group_id
                    .into_values()
                    .flat_map(|mut diagnostics| {
                        let primary_diagnostic =
                            diagnostics.iter_mut().min_by_key(|d| d.1.severity).unwrap();
                        primary_diagnostic.1.is_primary = true;
                        diagnostics
                    }),
            );

        if let Some(version) = version {
            let language_server = self.language_server.as_mut().unwrap();
            let versions_to_delete = language_server
                .pending_snapshots
                .range(..version)
                .map(|(v, _)| *v)
                .collect::<Vec<_>>();
            for version in versions_to_delete {
                language_server.pending_snapshots.remove(&version);
            }
        }

        self.diagnostics_update_count += 1;
        cx.notify();
        cx.emit(Event::DiagnosticsUpdated);
        Ok(Operation::UpdateDiagnostics {
            diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
            lamport_timestamp: self.lamport_timestamp(),
        })
    }

    pub fn diagnostics_in_range<'a, T>(
        &'a self,
        search_range: Range<T>,
    ) -> impl Iterator<Item = &DiagnosticEntry>
    where
        T: 'a + ToOffset,
    {
        self.diagnostics.range(search_range, self, true)
    }

    pub fn diagnostic_group(&self, group_id: usize) -> impl Iterator<Item = &DiagnosticEntry> {
        self.diagnostics.group(group_id)
    }

    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(indent_columns) = self.compute_autoindents() {
            let indent_columns = cx.background().spawn(indent_columns);
            match cx
                .background()
                .block_with_timeout(Duration::from_micros(500), indent_columns)
            {
                Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
                Err(indent_columns) => {
                    self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
                        let indent_columns = indent_columns.await;
                        this.update(&mut cx, |this, cx| {
                            this.apply_autoindents(indent_columns, cx);
                        });
                    }));
                }
            }
        }
    }

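    // Builds a future that computes new indent columns for the rows touched by
    // the pending autoindent requests: it compares suggestions from the pre-edit
    // snapshot with suggestions from the current snapshot, and also indents rows
    // that were newly inserted, yielding periodically to avoid blocking.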
    fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.language.is_none()
            || snapshot.tree.is_none()
            || self.autoindent_requests.is_empty()
        {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_columns = BTreeMap::new();
            for request in autoindent_requests {
                let old_to_new_rows = request
                    .edited
                    .iter()
                    .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
                    .zip(
                        request
                            .edited
                            .iter()
                            .map(|anchor| anchor.summary::<Point>(&snapshot).row),
                    )
                    .collect::<BTreeMap<u32, u32>>();

                let mut old_suggestions = HashMap::<u32, u32>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        let indentation_basis = old_to_new_rows
                            .get(&suggestion.basis_row)
                            .and_then(|from_row| old_suggestions.get(from_row).copied())
                            .unwrap_or_else(|| {
                                request
                                    .before_edit
                                    .indent_column_for_line(suggestion.basis_row)
                            });
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        old_suggestions.insert(
                            *old_to_new_rows.get(&old_row).unwrap(),
                            indentation_basis + delta,
                        );
                    }
                    yield_now().await;
                }

                // At this point, old_suggestions contains the suggested indentation for
                // all edited lines with respect to the state of the buffer before the
                // edit, but keyed by the row for these lines after the edits were applied.
                let new_edited_row_ranges =
                    contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
                for new_edited_row_range in new_edited_row_ranges {
                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        let new_indentation = indent_columns
                            .get(&suggestion.basis_row)
                            .copied()
                            .unwrap_or_else(|| {
                                snapshot.indent_column_for_line(suggestion.basis_row)
                            })
                            + delta;
                        if old_suggestions
                            .get(&new_row)
                            .map_or(true, |old_indentation| new_indentation != *old_indentation)
                        {
                            indent_columns.insert(new_row, new_indentation);
                        }
                    }
                    yield_now().await;
                }

                if let Some(inserted) = request.inserted.as_ref() {
                    let inserted_row_ranges = contiguous_ranges(
                        inserted
                            .iter()
                            .map(|range| range.to_point(&snapshot))
                            .flat_map(|range| range.start.row..range.end.row + 1),
                        max_rows_between_yields,
                    );
                    for inserted_row_range in inserted_row_ranges {
                        let suggestions = snapshot
                            .suggest_autoindents(inserted_row_range.clone())
                            .into_iter()
                            .flatten();
                        for (row, suggestion) in inserted_row_range.zip(suggestions) {
                            let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                            let new_indentation = indent_columns
                                .get(&suggestion.basis_row)
                                .copied()
                                .unwrap_or_else(|| {
                                    snapshot.indent_column_for_line(suggestion.basis_row)
                                })
                                + delta;
                            indent_columns.insert(row, new_indentation);
                        }
                        yield_now().await;
                    }
                }
            }
            indent_columns
        })
    }

    fn apply_autoindents(
        &mut self,
        indent_columns: BTreeMap<u32, u32>,
        cx: &mut ModelContext<Self>,
    ) {
        let selection_set_ids = self
            .autoindent_requests
            .drain(..)
            .flat_map(|req| req.selection_set_ids.clone())
            .collect::<HashSet<_>>();

        self.start_transaction(selection_set_ids.iter().copied())
            .unwrap();
        for (row, indent_column) in &indent_columns {
            self.set_indent_column_for_line(*row, *indent_column, cx);
        }

        for selection_set_id in &selection_set_ids {
            if let Ok(set) = self.selection_set(*selection_set_id) {
                let new_selections = set
                    .selections::<Point>(&*self)
                    .map(|selection| {
                        if selection.start.column == 0 {
                            let delta = Point::new(
                                0,
                                indent_columns
                                    .get(&selection.start.row)
                                    .copied()
                                    .unwrap_or(0),
                            );
                            if delta.column > 0 {
                                return Selection {
                                    id: selection.id,
                                    goal: selection.goal,
                                    reversed: selection.reversed,
                                    start: selection.start + delta,
                                    end: selection.end + delta,
                                };
                            }
                        }
                        selection
                    })
                    .collect::<Vec<_>>();
                self.update_selection_set(*selection_set_id, &new_selections, cx)
                    .unwrap();
            }
        }

        self.end_transaction(selection_set_ids.iter().copied(), cx)
            .unwrap();
    }

    fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
        let current_column = self.indent_column_for_line(row);
        if column > current_column {
            let offset = Point::new(row, 0).to_offset(&*self);
            self.edit(
                [offset..offset],
                " ".repeat((column - current_column) as usize),
                cx,
            );
        } else if column < current_column {
            self.edit(
                [Point::new(row, 0)..Point::new(row, current_column - column)],
                "",
                cx,
            );
        }
    }

    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
        if let Some(tree) = self.syntax_tree() {
            let root = tree.root_node();
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            let mut node = root.descendant_for_byte_range(range.start, range.end);
            while node.map_or(false, |n| n.byte_range() == range) {
                node = node.unwrap().parent();
            }
            node.map(|n| n.byte_range())
        } else {
            None
        }
    }

    pub fn enclosing_bracket_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<usize>, Range<usize>)> {
        let (grammar, tree) = self.grammar().zip(self.syntax_tree())?;
        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;

        // Find bracket pairs that *inclusively* contain the given range.
        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
        let mut cursor = QueryCursorHandle::new();
        let matches = cursor.set_byte_range(range).matches(
            &grammar.brackets_query,
            tree.root_node(),
            TextProvider(self.as_rope()),
        );

        // Get the ranges of the innermost pair of brackets.
        matches
            .filter_map(|mat| {
                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
                Some((open.byte_range(), close.byte_range()))
            })
            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
    }

    pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
        // TODO: it would be nice to not allocate here.
        let old_text = self.text();
        let base_version = self.version();
        cx.background().spawn(async move {
            let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
                .iter_all_changes()
                .map(|c| (c.tag(), c.value().len()))
                .collect::<Vec<_>>();
            Diff {
                base_version,
                new_text,
                changes,
            }
        })
    }

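    // Applies a diff produced by `diff` as a single transaction, but only if the
    // buffer hasn't changed since the diff's base version. Returns whether the
    // diff was applied.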
    pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
        if self.version == diff.base_version {
            self.start_transaction(None).unwrap();
            let mut offset = 0;
            for (tag, len) in diff.changes {
                let range = offset..(offset + len);
                match tag {
                    ChangeTag::Equal => offset += len,
                    ChangeTag::Delete => self.edit(Some(range), "", cx),
                    ChangeTag::Insert => {
                        self.edit(Some(offset..offset), &diff.new_text[range], cx);
                        offset += len;
                    }
                }
            }
            self.end_transaction(None, cx).unwrap();
            true
        } else {
            false
        }
    }

    pub fn is_dirty(&self) -> bool {
        !self.saved_version.ge(&self.version)
            || self.file.as_ref().map_or(false, |file| file.is_deleted())
    }

    pub fn has_conflict(&self) -> bool {
        !self.saved_version.ge(&self.version)
            && self
                .file
                .as_ref()
                .map_or(false, |file| file.mtime() > self.saved_mtime)
    }

    pub fn subscribe(&mut self) -> Subscription {
        self.text.subscribe()
    }

    pub fn start_transaction(
        &mut self,
        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
    ) -> Result<()> {
        self.start_transaction_at(selection_set_ids, Instant::now())
    }

    pub(crate) fn start_transaction_at(
        &mut self,
        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
        now: Instant,
    ) -> Result<()> {
        self.text.start_transaction_at(selection_set_ids, now)
    }

    pub fn end_transaction(
        &mut self,
        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.end_transaction_at(selection_set_ids, Instant::now(), cx)
    }

    pub(crate) fn end_transaction_at(
        &mut self,
        selection_set_ids: impl IntoIterator<Item = SelectionSetId>,
        now: Instant,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        if let Some(start_version) = self.text.end_transaction_at(selection_set_ids, now) {
            let was_dirty = start_version != self.saved_version;
            self.did_edit(&start_version, was_dirty, cx);
        }
        Ok(())
    }

    fn update_language_server(&mut self) {
        let language_server = if let Some(language_server) = self.language_server.as_mut() {
            language_server
        } else {
            return;
        };
        let abs_path = self
            .file
            .as_ref()
            .map_or(Path::new("/").to_path_buf(), |file| {
                file.abs_path().unwrap()
            });

        let version = post_inc(&mut language_server.next_version);
        let snapshot = LanguageServerSnapshot {
            buffer_snapshot: self.text.snapshot(),
            version,
            path: Arc::from(abs_path),
        };
        language_server
            .pending_snapshots
            .insert(version, snapshot.clone());
        let _ = language_server
            .latest_snapshot
            .blocking_send(Some(snapshot));
    }

    pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
    where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, false, cx)
    }

    pub fn edit_with_autoindent<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, true, cx)
    }

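    // Replaces each of the given ranges with `new_text` inside a single
    // transaction. Empty no-op ranges are skipped and overlapping ranges are
    // coalesced; when `autoindent` is set, an `AutoindentRequest` is recorded so
    // indentation can be adjusted after the next reparse.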
    pub fn edit_internal<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        autoindent: bool,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        let new_text = new_text.into();

        // Skip invalid ranges and coalesce contiguous ones.
        let mut ranges: Vec<Range<usize>> = Vec::new();
        for range in ranges_iter {
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            if !new_text.is_empty() || !range.is_empty() {
                if let Some(prev_range) = ranges.last_mut() {
                    if prev_range.end >= range.start {
                        prev_range.end = cmp::max(prev_range.end, range.end);
                    } else {
                        ranges.push(range);
                    }
                } else {
                    ranges.push(range);
                }
            }
        }
        if ranges.is_empty() {
            return;
        }

        self.start_transaction(None).unwrap();
        self.pending_autoindent.take();
        let autoindent_request = if autoindent && self.language.is_some() {
            let before_edit = self.snapshot();
            let edited = ranges
                .iter()
                .filter_map(|range| {
                    let start = range.start.to_point(self);
                    if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
                        None
                    } else {
                        Some(self.anchor_before(range.start))
                    }
                })
                .collect();
            Some((before_edit, edited))
        } else {
            None
        };

        let first_newline_ix = new_text.find('\n');
        let new_text_len = new_text.len();

        let edit = self.text.edit(ranges.iter().cloned(), new_text);

        if let Some((before_edit, edited)) = autoindent_request {
            let mut inserted = None;
            if let Some(first_newline_ix) = first_newline_ix {
                let mut delta = 0isize;
                inserted = Some(
                    ranges
                        .iter()
                        .map(|range| {
                            let start =
                                (delta + range.start as isize) as usize + first_newline_ix + 1;
                            let end = (delta + range.start as isize) as usize + new_text_len;
                            delta +=
                                (range.end as isize - range.start as isize) + new_text_len as isize;
                            self.anchor_before(start)..self.anchor_after(end)
                        })
                        .collect(),
                );
            }

            let selection_set_ids = self
                .text
                .peek_undo_stack()
                .unwrap()
                .starting_selection_set_ids()
                .collect();
            self.autoindent_requests.push(Arc::new(AutoindentRequest {
                selection_set_ids,
                before_edit,
                edited,
                inserted,
            }));
        }

        self.end_transaction(None, cx).unwrap();
        self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
    }

    fn did_edit(
        &mut self,
        old_version: &clock::Global,
        was_dirty: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if self.edits_since::<usize>(old_version).next().is_none() {
            return;
        }

        self.reparse(cx);
        self.update_language_server();

        cx.emit(Event::Edited);
        if !was_dirty {
            cx.emit(Event::Dirtied);
        }
        cx.notify();
    }

    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language.as_ref().and_then(|l| l.grammar.as_ref())
    }

    pub fn add_selection_set<T: ToOffset>(
        &mut self,
        selections: &[Selection<T>],
        cx: &mut ModelContext<Self>,
    ) -> SelectionSetId {
        let operation = self.text.add_selection_set(selections);
        if let text::Operation::UpdateSelections { set_id, .. } = &operation {
            let set_id = *set_id;
            cx.notify();
            self.send_operation(Operation::Buffer(operation), cx);
            set_id
        } else {
            unreachable!()
        }
    }

    pub fn update_selection_set<T: ToOffset>(
        &mut self,
        set_id: SelectionSetId,
        selections: &[Selection<T>],
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let operation = self.text.update_selection_set(set_id, selections)?;
        cx.notify();
        self.send_operation(Operation::Buffer(operation), cx);
        Ok(())
    }

    pub fn set_active_selection_set(
        &mut self,
        set_id: Option<SelectionSetId>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let operation = self.text.set_active_selection_set(set_id)?;
        self.send_operation(Operation::Buffer(operation), cx);
        Ok(())
    }

    pub fn remove_selection_set(
        &mut self,
        set_id: SelectionSetId,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let operation = self.text.remove_selection_set(set_id)?;
        cx.notify();
        self.send_operation(Operation::Buffer(operation), cx);
        Ok(())
    }

    pub fn apply_ops<I: IntoIterator<Item = Operation>>(
        &mut self,
        ops: I,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.pending_autoindent.take();
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        let mut deferred_ops = Vec::new();
        let buffer_ops = ops
            .into_iter()
            .filter_map(|op| match op {
                Operation::Buffer(op) => Some(op),
                _ => {
                    if self.can_apply_op(&op) {
                        self.apply_op(op, cx);
                    } else {
                        deferred_ops.push(op);
                    }
                    None
                }
            })
            .collect::<Vec<_>>();
        self.text.apply_ops(buffer_ops)?;
        self.flush_deferred_ops(cx);
        self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited as the operations could include a
        // selection update.
        cx.notify();
        Ok(())
    }

    fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
        let mut deferred_ops = Vec::new();
        for op in self.deferred_ops.drain().iter().cloned() {
            if self.can_apply_op(&op) {
                self.apply_op(op, cx);
            } else {
                deferred_ops.push(op);
            }
        }
        self.deferred_ops.insert(deferred_ops);
    }

    fn can_apply_op(&self, operation: &Operation) -> bool {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics { diagnostics, .. } => {
                diagnostics.iter().all(|diagnostic| {
                    self.text.can_resolve(&diagnostic.range.start)
                        && self.text.can_resolve(&diagnostic.range.end)
                })
            }
        }
    }

    fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics { diagnostics, .. } => {
                self.apply_diagnostic_update(diagnostics, cx);
            }
        }
    }

    fn apply_diagnostic_update(
        &mut self,
        diagnostics: Arc<[DiagnosticEntry]>,
        cx: &mut ModelContext<Self>,
    ) {
        self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
        self.diagnostics_update_count += 1;
        cx.notify();
    }

    #[cfg(not(test))]
    pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
        if let Some(file) = &self.file {
            file.buffer_updated(self.remote_id(), operation, cx.as_mut());
        }
    }

    #[cfg(test)]
    pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
        self.operations.push(operation);
    }

    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
        self.text.remove_peer(replica_id);
        cx.notify();
    }

    pub fn undo(&mut self, cx: &mut ModelContext<Self>) {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        for operation in self.text.undo() {
            self.send_operation(Operation::Buffer(operation), cx);
        }

        self.did_edit(&old_version, was_dirty, cx);
    }

    pub fn redo(&mut self, cx: &mut ModelContext<Self>) {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        for operation in self.text.redo() {
            self.send_operation(Operation::Buffer(operation), cx);
        }

        self.did_edit(&old_version, was_dirty, cx);
    }
}

#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        old_range_count: usize,
        cx: &mut ModelContext<Self>,
    ) where
        T: rand::Rng,
    {
        self.start_transaction(None).unwrap();
        self.text.randomly_edit(rng, old_range_count);
        self.end_transaction(None, cx).unwrap();
    }

    pub fn randomly_mutate<T>(&mut self, rng: &mut T, cx: &mut ModelContext<Self>)
    where
        T: rand::Rng,
    {
        self.start_transaction(None).unwrap();
        self.text.randomly_mutate(rng);
        self.end_transaction(None, cx).unwrap();
    }
}

impl Entity for Buffer {
    type Event = Event;

    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
        if let Some(file) = self.file.as_ref() {
            file.buffer_removed(self.remote_id(), cx);
        }
    }
}

impl Deref for Buffer {
    type Target = TextBuffer;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

impl Snapshot {
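    // Uses the grammar's indents query to produce, for each row in `row_range`, a
    // basis row and whether to indent one extra level relative to that row.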
    fn suggest_autoindents<'a>(
        &'a self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
        let mut query_cursor = QueryCursorHandle::new();
        if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
            let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

            // Get the "indentation ranges" that intersect this row range.
            let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
            let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
            query_cursor.set_point_range(
                Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
                    ..Point::new(row_range.end, 0).to_ts_point(),
            );
            let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
            for mat in query_cursor.matches(
                &grammar.indents_query,
                tree.root_node(),
                TextProvider(self.as_rope()),
            ) {
                let mut node_kind = "";
                let mut start: Option<Point> = None;
                let mut end: Option<Point> = None;
                for capture in mat.captures {
                    if Some(capture.index) == indent_capture_ix {
                        node_kind = capture.node.kind();
                        start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                        end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                    } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position().into()));
                    }
                }

                if let Some((start, end)) = start.zip(end) {
                    if start.row == end.row {
                        continue;
                    }

                    let range = start..end;
                    match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
                        Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
                        Ok(ix) => {
                            let prev_range = &mut indentation_ranges[ix];
                            prev_range.0.end = prev_range.0.end.max(range.end);
                        }
                    }
                }
            }

            let mut prev_row = prev_non_blank_row.unwrap_or(0);
            Some(row_range.map(move |row| {
                let row_start = Point::new(row, self.indent_column_for_line(row));

                let mut indent_from_prev_row = false;
                let mut outdent_to_row = u32::MAX;
                for (range, _node_kind) in &indentation_ranges {
                    if range.start.row >= row {
                        break;
                    }

                    if range.start.row == prev_row && range.end > row_start {
                        indent_from_prev_row = true;
                    }
                    if range.end.row >= prev_row && range.end <= row_start {
                        outdent_to_row = outdent_to_row.min(range.start.row);
                    }
                }

                let suggestion = if outdent_to_row == prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                } else if indent_from_prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: true,
                    }
                } else if outdent_to_row < prev_row {
                    IndentSuggestion {
                        basis_row: outdent_to_row,
                        indent: false,
                    }
                } else {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                };

                prev_row = row;
                suggestion
            }))
        } else {
            None
        }
    }

    fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
        while row > 0 {
            row -= 1;
            if !self.is_line_blank(row) {
                return Some(row);
            }
        }
        None
    }

    pub fn chunks<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
        theme: Option<&'a SyntaxTheme>,
    ) -> Chunks<'a> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);

        let mut highlights = None;
        let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
        if let Some(theme) = theme {
            for entry in self.diagnostics.range(range.clone(), self, true) {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start.to_offset(self),
                    is_start: true,
                    severity: entry.diagnostic.severity,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end.to_offset(self),
                    is_start: false,
                    severity: entry.diagnostic.severity,
                });
            }
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));

            if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
                let mut query_cursor = QueryCursorHandle::new();

                // TODO - add a Tree-sitter API to remove the need for this.
                let cursor = unsafe {
                    std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
                };
                let captures = cursor.set_byte_range(range.clone()).captures(
                    &grammar.highlights_query,
                    tree.root_node(),
                    TextProvider(self.text.as_rope()),
                );
                highlights = Some(Highlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_map: grammar.highlight_map(),
                    _query_cursor: query_cursor,
                    theme,
                })
            }
        }

        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
        let chunks = self.text.as_rope().chunks_in_range(range.clone());

        Chunks {
            range,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            highlights,
        }
    }

    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language
            .as_ref()
            .and_then(|language| language.grammar.as_ref())
    }

    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    pub fn parse_count(&self) -> usize {
        self.parse_count
    }
}

impl Clone for Snapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            tree: self.tree.clone(),
            diagnostics: self.diagnostics.clone(),
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.is_parsing,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }
}

impl Deref for Snapshot {
    type Target = text::Snapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        ByteChunks(self.0.chunks_in_range(node.byte_range()))
    }
}

struct ByteChunks<'a>(rope::Chunks<'a>);

impl<'a> Iterator for ByteChunks<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(str::as_bytes)
    }
}

unsafe impl<'a> Send for Chunks<'a> {}

impl<'a> Chunks<'a> {
    pub fn seek(&mut self, offset: usize) {
        self.range.start = offset;
        self.chunks.seek(self.range.start);
        if let Some(highlights) = self.highlights.as_mut() {
            highlights
                .stack
                .retain(|(end_offset, _)| *end_offset > offset);
            if let Some((mat, capture_ix)) = &highlights.next_capture {
                let capture = mat.captures[*capture_ix as usize];
                if offset >= capture.node.start_byte() {
                    let next_capture_end = capture.node.end_byte();
                    if offset < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_map.get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            }
            highlights.captures.set_byte_range(self.range.clone());
        }
    }

    pub fn offset(&self) -> usize {
        self.range.start
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }
    }

    fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }
}

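// Yields successive chunks of text, splitting at highlight capture boundaries and
// diagnostic endpoints so that each chunk carries a single highlight style and a
// single most-severe diagnostic.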
impl<'a> Iterator for Chunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
                let capture = mat.captures[*capture_ix as usize];
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id = highlights.highlight_map.get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
            if endpoint.offset <= self.range.start {
                self.update_diagnostic_depths(endpoint);
                self.diagnostic_endpoints.next();
            } else {
                next_diagnostic_endpoint = endpoint.offset;
                break;
            }
        }

        if let Some(chunk) = self.chunks.peek() {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_style = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
                    highlight_style = parent_highlight_id.style(highlights.theme);
                }
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                highlight_style,
                diagnostic: self.current_diagnostic_severity(),
            })
        } else {
            None
        }
    }
}

impl QueryCursorHandle {
    fn new() -> Self {
        QueryCursorHandle(Some(
            QUERY_CURSORS
                .lock()
                .pop()
                .unwrap_or_else(|| QueryCursor::new()),
        ))
    }
}

impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        self.0.as_ref().unwrap()
    }
}

impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.0.as_mut().unwrap()
    }
}

impl Drop for QueryCursorHandle {
    fn drop(&mut self) {
        let mut cursor = self.0.take().unwrap();
        cursor.set_byte_range(0..usize::MAX);
        cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
        QUERY_CURSORS.lock().push(cursor)
    }
}

trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}

impl ToTreeSitterPoint for Point {
    fn to_ts_point(self) -> tree_sitter::Point {
        tree_sitter::Point::new(self.row as usize, self.column as usize)
    }

    fn from_ts_point(point: tree_sitter::Point) -> Self {
        Point::new(point.row as u32, point.column as u32)
    }
}

trait ToPointUtf16 {
    fn to_point_utf16(self) -> PointUtf16;
}

impl ToPointUtf16 for lsp::Position {
    fn to_point_utf16(self) -> PointUtf16 {
        PointUtf16::new(self.line, self.character)
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

fn diagnostic_ranges<'a>(
    diagnostic: &'a lsp::Diagnostic,
    abs_path: Option<&'a Path>,
) -> impl 'a + Iterator<Item = Range<PointUtf16>> {
    diagnostic
        .related_information
        .iter()
        .flatten()
        .filter_map(move |info| {
            if info.location.uri.to_file_path().ok()? == abs_path? {
                let info_start = PointUtf16::new(
                    info.location.range.start.line,
                    info.location.range.start.character,
                );
                let info_end = PointUtf16::new(
                    info.location.range.end.line,
                    info.location.range.end.character,
                );
                Some(info_start..info_end)
            } else {
                None
            }
        })
        .chain(Some(
            diagnostic.range.start.to_point_utf16()..diagnostic.range.end.to_point_utf16(),
        ))
}

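/// Coalesces an ascending sequence of row numbers into contiguous ranges, capping
/// each range at `max_len` rows. For example, rows `[1, 2, 3, 5]` with a `max_len`
/// of 2 yield `1..3`, `3..4`, and `5..6`.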
pub fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values.into_iter();
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}