use crate::diagnostic_set::DiagnosticEntry;
pub use crate::{
    diagnostic_set::DiagnosticSet,
    highlight_map::{HighlightId, HighlightMap},
    proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
    PLAIN_TEXT,
};
use anyhow::{anyhow, Result};
use clock::ReplicaId;
use futures::FutureExt as _;
use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
use lazy_static::lazy_static;
use lsp::LanguageServer;
use parking_lot::Mutex;
use postage::{prelude::Stream, sink::Sink, watch};
use similar::{ChangeTag, TextDiff};
use smol::future::yield_now;
use std::{
    any::Any,
    cell::RefCell,
    cmp,
    collections::{BTreeMap, HashMap, HashSet},
    ffi::OsString,
    future::Future,
    iter::{Iterator, Peekable},
    ops::{Deref, DerefMut, Range},
    path::{Path, PathBuf},
    str,
    sync::Arc,
    time::{Duration, Instant, SystemTime, UNIX_EPOCH},
    vec,
};
use sum_tree::TreeMap;
use text::{operation_queue::OperationQueue, rope::TextDimension};
pub use text::{Buffer as TextBuffer, Operation as _, *};
use theme::SyntaxTheme;
use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
use util::{post_inc, TryFutureExt as _};

#[cfg(any(test, feature = "test-support"))]
pub use tree_sitter_rust;

pub use lsp::DiagnosticSeverity;

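// Constructing a Tree-sitter parser is comparatively expensive, so each thread keeps a
// single parser and reuses it for every parse performed on that thread.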
thread_local! {
    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}

lazy_static! {
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}

// TODO - Make this configurable
const INDENT_SIZE: u32 = 4;

pub struct Buffer {
    text: TextBuffer,
    file: Option<Box<dyn File>>,
    saved_version: clock::Global,
    saved_mtime: SystemTime,
    language: Option<Arc<Language>>,
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    pending_autoindent: Option<Task<()>>,
    sync_parse_timeout: Duration,
    syntax_tree: Mutex<Option<SyntaxTree>>,
    parsing_in_background: bool,
    parse_count: usize,
    remote_selections: TreeMap<ReplicaId, Arc<[Selection<Anchor>]>>,
    diagnostics: DiagnosticSet,
    diagnostics_update_count: usize,
    language_server: Option<LanguageServerState>,
    deferred_ops: OperationQueue<Operation>,
    #[cfg(test)]
    pub(crate) operations: Vec<Operation>,
}

pub struct BufferSnapshot {
    text: text::BufferSnapshot,
    tree: Option<Tree>,
    diagnostics: DiagnosticSet,
    remote_selections: TreeMap<ReplicaId, Arc<[Selection<Anchor>]>>,
    diagnostics_update_count: usize,
    is_parsing: bool,
    language: Option<Arc<Language>>,
    parse_count: usize,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Diagnostic {
    pub severity: DiagnosticSeverity,
    pub message: String,
    pub group_id: usize,
    pub is_primary: bool,
}

struct LanguageServerState {
    server: Arc<LanguageServer>,
    latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
    pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
    next_version: usize,
    _maintain_server: Task<Option<()>>,
}

#[derive(Clone)]
struct LanguageServerSnapshot {
    buffer_snapshot: text::BufferSnapshot,
    version: usize,
    path: Arc<Path>,
}

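/// An operation that can be applied to a [`Buffer`]: either an edit to the underlying
/// text CRDT, or an update to replicated metadata such as diagnostics and remote
/// selections.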
#[derive(Clone, Debug)]
pub enum Operation {
    Buffer(text::Operation),
    UpdateDiagnostics {
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
    UpdateSelections {
        replica_id: ReplicaId,
        selections: Arc<[Selection<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
    RemoveSelections {
        replica_id: ReplicaId,
        lamport_timestamp: clock::Lamport,
    },
}

#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Event {
    Edited,
    Dirtied,
    Saved,
    FileHandleChanged,
    Reloaded,
    Reparsed,
    DiagnosticsUpdated,
    Closed,
}

pub trait File {
    fn worktree_id(&self) -> usize;

    fn entry_id(&self) -> Option<usize>;

    fn mtime(&self) -> SystemTime;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<Path>;

    /// Returns the absolute path of this file.
    fn abs_path(&self) -> Option<PathBuf>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self) -> PathBuf;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name(&self) -> Option<OsString>;

    fn is_deleted(&self) -> bool;

    fn save(
        &self,
        buffer_id: u64,
        text: Rope,
        version: clock::Global,
        cx: &mut MutableAppContext,
    ) -> Task<Result<(clock::Global, SystemTime)>>;

    fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;

    fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);

    fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);

    fn boxed_clone(&self) -> Box<dyn File>;

    fn as_any(&self) -> &dyn Any;
}

struct QueryCursorHandle(Option<QueryCursor>);

#[derive(Clone)]
struct SyntaxTree {
    tree: Tree,
    version: clock::Global,
}

#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    edited: Vec<Anchor>,
    inserted: Option<Vec<Range<Anchor>>>,
}

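/// A suggested indentation for a single row: use the indentation of `basis_row`,
/// increased by one level when `indent` is true.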
#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    indent: bool,
}

struct TextProvider<'a>(&'a Rope);

struct BufferChunkHighlights<'a> {
    captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
    next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
    stack: Vec<(usize, HighlightId)>,
    highlight_map: HighlightMap,
    theme: &'a SyntaxTheme,
    _query_cursor: QueryCursorHandle,
}

pub struct BufferChunks<'a> {
    range: Range<usize>,
    chunks: rope::Chunks<'a>,
    diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    highlights: Option<BufferChunkHighlights<'a>>,
}

#[derive(Clone, Copy, Debug, Default)]
pub struct Chunk<'a> {
    pub text: &'a str,
    pub highlight_style: Option<HighlightStyle>,
    pub diagnostic: Option<DiagnosticSeverity>,
}

pub(crate) struct Diff {
    base_version: clock::Global,
    new_text: Arc<str>,
    changes: Vec<(ChangeTag, usize)>,
}

#[derive(Clone, Copy)]
struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    severity: DiagnosticSeverity,
}

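// A minimal construction sketch (illustrative only; assumes a gpui `MutableAppContext`
// named `cx` and an `Arc<Language>` named `language` are in scope):
//
//     let buffer = cx.add_model(|cx| {
//         Buffer::new(0, "fn main() {}", cx).with_language(Some(language), None, cx)
//     });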
impl Buffer {
    pub fn new<T: Into<Arc<str>>>(
        replica_id: ReplicaId,
        base_text: T,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new(
                replica_id,
                cx.model_id() as u64,
                History::new(base_text.into()),
            ),
            None,
        )
    }

    pub fn from_file<T: Into<Arc<str>>>(
        replica_id: ReplicaId,
        base_text: T,
        file: Box<dyn File>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        Self::build(
            TextBuffer::new(
                replica_id,
                cx.model_id() as u64,
                History::new(base_text.into()),
            ),
            Some(file),
        )
    }

    pub fn from_proto(
        replica_id: ReplicaId,
        message: proto::Buffer,
        file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Self> {
        let mut buffer =
            text::Buffer::new(replica_id, message.id, History::new(message.content.into()));
        let ops = message
            .history
            .into_iter()
            .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op)));
        buffer.apply_ops(ops)?;
        let mut this = Self::build(buffer, file);
        this.apply_diagnostic_update(
            Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
            cx,
        );

        Ok(this)
    }

    pub fn to_proto(&self) -> proto::Buffer {
        proto::Buffer {
            id: self.remote_id(),
            content: self.text.base_text().to_string(),
            history: self
                .text
                .history()
                .map(proto::serialize_edit_operation)
                .collect(),
            selections: Vec::new(),
            diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
        }
    }

    pub fn with_language(
        mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        self.set_language(language, language_server, cx);
        self
    }

    fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
        let saved_mtime;
        if let Some(file) = file.as_ref() {
            saved_mtime = file.mtime();
        } else {
            saved_mtime = UNIX_EPOCH;
        }

        Self {
            saved_mtime,
            saved_version: buffer.version(),
            text: buffer,
            file,
            syntax_tree: Mutex::new(None),
            parsing_in_background: false,
            parse_count: 0,
            sync_parse_timeout: Duration::from_millis(1),
            autoindent_requests: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_update_count: 0,
            language_server: None,
            deferred_ops: OperationQueue::new(),
            #[cfg(test)]
            operations: Default::default(),
        }
    }

    pub fn snapshot(&self) -> BufferSnapshot {
        BufferSnapshot {
            text: self.text.snapshot(),
            tree: self.syntax_tree(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.parsing_in_background,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }

    pub fn file(&self) -> Option<&dyn File> {
        self.file.as_deref()
    }

    pub fn save(
        &mut self,
        cx: &mut ModelContext<Self>,
    ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
        let file = self
            .file
            .as_ref()
            .ok_or_else(|| anyhow!("buffer has no file"))?;
        let text = self.as_rope().clone();
        let version = self.version();
        let save = file.save(self.remote_id(), text, version, cx.as_mut());
        Ok(cx.spawn(|this, mut cx| async move {
            let (version, mtime) = save.await?;
            this.update(&mut cx, |this, cx| {
                this.did_save(version.clone(), mtime, None, cx);
            });
            Ok((version, mtime))
        }))
    }

    pub fn set_language(
        &mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<lsp::LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.language = language;
        self.language_server = if let Some(server) = language_server {
            let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
            Some(LanguageServerState {
                latest_snapshot: latest_snapshot_tx,
                pending_snapshots: Default::default(),
                next_version: 0,
                server: server.clone(),
                _maintain_server: cx.background().spawn(
                    async move {
                        let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
                        while let Some(snapshot) = latest_snapshot_rx.recv().await {
                            if let Some(snapshot) = snapshot {
                                let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
                                if let Some(prev_snapshot) = prev_snapshot {
                                    let changes = lsp::DidChangeTextDocumentParams {
                                        text_document: lsp::VersionedTextDocumentIdentifier::new(
                                            uri,
                                            snapshot.version as i32,
                                        ),
                                        content_changes: snapshot
                                            .buffer_snapshot
                                            .edits_since::<(PointUtf16, usize)>(
                                                prev_snapshot.buffer_snapshot.version(),
                                            )
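                                            // Convert each edit into an LSP change event.
                                            // Earlier edits in this batch have already been
                                            // applied, so the replaced range starts at the
                                            // edit's position in the new text and extends by
                                            // the length of the old text.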
                                            .map(|edit| {
                                                let edit_start = edit.new.start.0;
                                                let edit_end = edit_start
                                                    + (edit.old.end.0 - edit.old.start.0);
                                                let new_text = snapshot
                                                    .buffer_snapshot
                                                    .text_for_range(
                                                        edit.new.start.1..edit.new.end.1,
                                                    )
                                                    .collect();
                                                lsp::TextDocumentContentChangeEvent {
                                                    range: Some(lsp::Range::new(
                                                        lsp::Position::new(
                                                            edit_start.row,
                                                            edit_start.column,
                                                        ),
                                                        lsp::Position::new(
                                                            edit_end.row,
                                                            edit_end.column,
                                                        ),
                                                    )),
                                                    range_length: None,
                                                    text: new_text,
                                                }
                                            })
                                            .collect(),
                                    };
                                    server
                                        .notify::<lsp::notification::DidChangeTextDocument>(changes)
                                        .await?;
                                } else {
                                    server
                                        .notify::<lsp::notification::DidOpenTextDocument>(
                                            lsp::DidOpenTextDocumentParams {
                                                text_document: lsp::TextDocumentItem::new(
                                                    uri,
                                                    Default::default(),
                                                    snapshot.version as i32,
                                                    snapshot.buffer_snapshot.text().to_string(),
                                                ),
                                            },
                                        )
                                        .await?;
                                }

                                prev_snapshot = Some(snapshot);
                            }
                        }
                        Ok(())
                    }
                    .log_err(),
                ),
            })
        } else {
            None
        };

        self.reparse(cx);
        self.update_language_server();
    }

    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: SystemTime,
        new_file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.saved_mtime = mtime;
        self.saved_version = version;
        if let Some(new_file) = new_file {
            self.file = Some(new_file);
        }
        if let Some(state) = &self.language_server {
            cx.background()
                .spawn(
                    state
                        .server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: lsp::TextDocumentIdentifier {
                                    uri: lsp::Url::from_file_path(
                                        self.file.as_ref().unwrap().abs_path().unwrap(),
                                    )
                                    .unwrap(),
                                },
                                text: None,
                            },
                        ),
                )
                .detach()
        }
        cx.emit(Event::Saved);
    }

    pub fn file_updated(
        &mut self,
        new_file: Box<dyn File>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<()>> {
        let old_file = self.file.as_ref()?;
        let mut file_changed = false;
        let mut task = None;

        if new_file.path() != old_file.path() {
            file_changed = true;
        }

        if new_file.is_deleted() {
            if !old_file.is_deleted() {
                file_changed = true;
                if !self.is_dirty() {
                    cx.emit(Event::Dirtied);
                }
            }
        } else {
            let new_mtime = new_file.mtime();
            if new_mtime != old_file.mtime() {
                file_changed = true;

                if !self.is_dirty() {
                    task = Some(cx.spawn(|this, mut cx| {
                        async move {
                            let new_text = this.read_with(&cx, |this, cx| {
                                this.file.as_ref().and_then(|file| file.load_local(cx))
                            });
                            if let Some(new_text) = new_text {
                                let new_text = new_text.await?;
                                let diff = this
                                    .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
                                    .await;
                                this.update(&mut cx, |this, cx| {
                                    if this.apply_diff(diff, cx) {
                                        this.saved_version = this.version();
                                        this.saved_mtime = new_mtime;
                                        cx.emit(Event::Reloaded);
                                    }
                                });
                            }
                            Ok(())
                        }
                        .log_err()
                        .map(drop)
                    }));
                }
            }
        }

        if file_changed {
            cx.emit(Event::FileHandleChanged);
        }
        self.file = Some(new_file);
        task
    }

    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }

    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    pub fn parse_count(&self) -> usize {
        self.parse_count
    }

    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    pub(crate) fn syntax_tree(&self) -> Option<Tree> {
        if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
            self.interpolate_tree(syntax_tree);
            Some(syntax_tree.tree.clone())
        } else {
            None
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.parsing_in_background
    }

    #[cfg(test)]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }

    fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
        if self.parsing_in_background {
            return false;
        }

        if let Some(grammar) = self.grammar().cloned() {
            let old_tree = self.syntax_tree();
            let text = self.as_rope().clone();
            let parsed_version = self.version();
            let parse_task = cx.background().spawn({
                let grammar = grammar.clone();
                async move { Self::parse_text(&text, old_tree, &grammar) }
            });

            match cx
                .background()
                .block_with_timeout(self.sync_parse_timeout, parse_task)
            {
                Ok(new_tree) => {
                    self.did_finish_parsing(new_tree, parsed_version, cx);
                    return true;
                }
                Err(parse_task) => {
                    self.parsing_in_background = true;
                    cx.spawn(move |this, mut cx| async move {
                        let new_tree = parse_task.await;
                        this.update(&mut cx, move |this, cx| {
                            let grammar_changed = this
                                .grammar()
                                .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
                            let parse_again = this.version.gt(&parsed_version) || grammar_changed;
                            this.parsing_in_background = false;
                            this.did_finish_parsing(new_tree, parsed_version, cx);

                            if parse_again && this.reparse(cx) {
                                return;
                            }
                        });
                    })
                    .detach();
                }
            }
        }
        false
    }

    fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
        PARSER.with(|parser| {
            let mut parser = parser.borrow_mut();
            parser
                .set_language(grammar.ts_language)
                .expect("incompatible grammar");
            let mut chunks = text.chunks_in_range(0..text.len());
            let tree = parser
                .parse_with(
                    &mut move |offset, _| {
                        chunks.seek(offset);
                        chunks.next().unwrap_or("").as_bytes()
                    },
                    old_tree.as_ref(),
                )
                .unwrap();
            tree
        })
    }

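    // Adjusts the old syntax tree for edits made since it was produced, so stale tree
    // positions stay roughly aligned with the current text until a fresh parse lands.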
    fn interpolate_tree(&self, tree: &mut SyntaxTree) {
        for edit in self.edits_since::<(usize, Point)>(&tree.version) {
            let (bytes, lines) = edit.flatten();
            tree.tree.edit(&InputEdit {
                start_byte: bytes.new.start,
                old_end_byte: bytes.new.start + bytes.old.len(),
                new_end_byte: bytes.new.end,
                start_position: lines.new.start.to_ts_point(),
                old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
                    .to_ts_point(),
                new_end_position: lines.new.end.to_ts_point(),
            });
        }
        tree.version = self.version();
    }

    fn did_finish_parsing(
        &mut self,
        tree: Tree,
        version: clock::Global,
        cx: &mut ModelContext<Self>,
    ) {
        self.parse_count += 1;
        *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
        self.request_autoindent(cx);
        cx.emit(Event::Reparsed);
        cx.notify();
    }

    pub fn update_diagnostics(
        &mut self,
        version: Option<i32>,
        mut diagnostics: Vec<lsp::Diagnostic>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Operation> {
        diagnostics.sort_unstable_by_key(|d| (d.range.start, d.range.end));

        let version = version.map(|version| version as usize);
        let content = if let Some(version) = version {
            let language_server = self.language_server.as_mut().unwrap();
            let snapshot = language_server
                .pending_snapshots
                .get(&version)
                .ok_or_else(|| anyhow!("missing snapshot"))?;
            &snapshot.buffer_snapshot
        } else {
            self.deref()
        };
        let abs_path = self.file.as_ref().and_then(|f| f.abs_path());

        let empty_set = HashSet::new();
        let disk_based_sources = self
            .language
            .as_ref()
            .and_then(|language| language.disk_based_diagnostic_sources())
            .unwrap_or(&empty_set);

        let mut edits_since_save = content
            .edits_since::<PointUtf16>(&self.saved_version)
            .peekable();
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        let mut group_ids_by_diagnostic_range = HashMap::new();
        let mut diagnostics_by_group_id = HashMap::new();
        let mut next_group_id = 0;
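        // Assign each diagnostic to a group: diagnostics whose primary range or related
        // ranges coincide (for the same source and code) share a group id. For sources
        // that only report on save, positions are also translated through the edits made
        // since the last save so they line up with the current buffer contents.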
        'outer: for diagnostic in &diagnostics {
            let mut start = diagnostic.range.start.to_point_utf16();
            let mut end = diagnostic.range.end.to_point_utf16();
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref();
            let group_id = diagnostic_ranges(&diagnostic, abs_path.as_deref())
                .find_map(|range| group_ids_by_diagnostic_range.get(&(source, code, range)))
                .copied()
                .unwrap_or_else(|| {
                    let group_id = post_inc(&mut next_group_id);
                    for range in diagnostic_ranges(&diagnostic, abs_path.as_deref()) {
                        group_ids_by_diagnostic_range.insert((source, code, range), group_id);
                    }
                    group_id
                });

            if diagnostic
                .source
                .as_ref()
                .map_or(false, |source| disk_based_sources.contains(source))
            {
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                start = last_edit_new_end + (start - last_edit_old_end);
                end = last_edit_new_end + (end - last_edit_old_end);
            }

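            // Expand empty diagnostic ranges to cover at least one character so they stay
            // visible, preferring the character after the diagnostic's position.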
            let mut range = content.clip_point_utf16(start, Bias::Left)
                ..content.clip_point_utf16(end, Bias::Right);
            if range.start == range.end {
                range.end.column += 1;
                range.end = content.clip_point_utf16(range.end, Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.start = content.clip_point_utf16(range.start, Bias::Left);
                }
            }

            diagnostics_by_group_id
                .entry(group_id)
                .or_insert(Vec::new())
                .push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: false,
                    },
                });
        }

        drop(edits_since_save);
        let new_diagnostics = DiagnosticSet::new(
            diagnostics_by_group_id
                .into_values()
                .flat_map(|mut diagnostics| {
                    let primary = diagnostics
                        .iter_mut()
                        .min_by_key(|entry| entry.diagnostic.severity)
                        .unwrap();
                    primary.diagnostic.is_primary = true;
                    diagnostics
                }),
            content,
        );
        self.diagnostics = new_diagnostics;

        if let Some(version) = version {
            let language_server = self.language_server.as_mut().unwrap();
            let versions_to_delete = language_server
                .pending_snapshots
                .range(..version)
                .map(|(v, _)| *v)
                .collect::<Vec<_>>();
            for version in versions_to_delete {
                language_server.pending_snapshots.remove(&version);
            }
        }

        self.diagnostics_update_count += 1;
        cx.notify();
        cx.emit(Event::DiagnosticsUpdated);
        Ok(Operation::UpdateDiagnostics {
            diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
            lamport_timestamp: self.lamport_timestamp(),
        })
    }

    fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(indent_columns) = self.compute_autoindents() {
            let indent_columns = cx.background().spawn(indent_columns);
            match cx
                .background()
                .block_with_timeout(Duration::from_micros(500), indent_columns)
            {
                Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
                Err(indent_columns) => {
                    self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
                        let indent_columns = indent_columns.await;
                        this.update(&mut cx, |this, cx| {
                            this.apply_autoindents(indent_columns, cx);
                        });
                    }));
                }
            }
        }
    }

    fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.language.is_none()
            || snapshot.tree.is_none()
            || self.autoindent_requests.is_empty()
        {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_columns = BTreeMap::new();
            for request in autoindent_requests {
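                // Map each edited row in the pre-edit snapshot to the corresponding row in
                // the current snapshot, so suggestions computed against the old text can be
                // compared with suggestions computed against the new text below.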
                let old_to_new_rows = request
                    .edited
                    .iter()
                    .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
                    .zip(
                        request
                            .edited
                            .iter()
                            .map(|anchor| anchor.summary::<Point>(&snapshot).row),
                    )
                    .collect::<BTreeMap<u32, u32>>();

                let mut old_suggestions = HashMap::<u32, u32>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        let indentation_basis = old_to_new_rows
                            .get(&suggestion.basis_row)
                            .and_then(|from_row| old_suggestions.get(from_row).copied())
                            .unwrap_or_else(|| {
                                request
                                    .before_edit
                                    .indent_column_for_line(suggestion.basis_row)
                            });
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        old_suggestions.insert(
                            *old_to_new_rows.get(&old_row).unwrap(),
                            indentation_basis + delta,
                        );
                    }
                    yield_now().await;
                }

                // At this point, old_suggestions contains the suggested indentation for all
                // edited lines with respect to the state of the buffer before the edit, but
                // keyed by the row for these lines after the edits were applied.
                let new_edited_row_ranges =
                    contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
                for new_edited_row_range in new_edited_row_ranges {
                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        let new_indentation = indent_columns
                            .get(&suggestion.basis_row)
                            .copied()
                            .unwrap_or_else(|| {
                                snapshot.indent_column_for_line(suggestion.basis_row)
                            })
                            + delta;
                        if old_suggestions
                            .get(&new_row)
                            .map_or(true, |old_indentation| new_indentation != *old_indentation)
                        {
                            indent_columns.insert(new_row, new_indentation);
                        }
                    }
                    yield_now().await;
                }

                if let Some(inserted) = request.inserted.as_ref() {
                    let inserted_row_ranges = contiguous_ranges(
                        inserted
                            .iter()
                            .map(|range| range.to_point(&snapshot))
                            .flat_map(|range| range.start.row..range.end.row + 1),
                        max_rows_between_yields,
                    );
                    for inserted_row_range in inserted_row_ranges {
                        let suggestions = snapshot
                            .suggest_autoindents(inserted_row_range.clone())
                            .into_iter()
                            .flatten();
                        for (row, suggestion) in inserted_row_range.zip(suggestions) {
                            let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                            let new_indentation = indent_columns
                                .get(&suggestion.basis_row)
                                .copied()
                                .unwrap_or_else(|| {
                                    snapshot.indent_column_for_line(suggestion.basis_row)
                                })
                                + delta;
                            indent_columns.insert(row, new_indentation);
                        }
                        yield_now().await;
                    }
                }
            }
            indent_columns
        })
    }

    fn apply_autoindents(
        &mut self,
        indent_columns: BTreeMap<u32, u32>,
        cx: &mut ModelContext<Self>,
    ) {
        self.start_transaction();
        for (row, indent_column) in &indent_columns {
            self.set_indent_column_for_line(*row, *indent_column, cx);
        }
        self.end_transaction(cx);
    }

    fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
        let current_column = self.indent_column_for_line(row);
        if column > current_column {
            let offset = Point::new(row, 0).to_offset(&*self);
            self.edit(
                [offset..offset],
                " ".repeat((column - current_column) as usize),
                cx,
            );
        } else if column < current_column {
            self.edit(
                [Point::new(row, 0)..Point::new(row, current_column - column)],
                "",
                cx,
            );
        }
    }

    pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
        // TODO: it would be nice to not allocate here.
        let old_text = self.text();
        let base_version = self.version();
        cx.background().spawn(async move {
            let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
                .iter_all_changes()
                .map(|c| (c.tag(), c.value().len()))
                .collect::<Vec<_>>();
            Diff {
                base_version,
                new_text,
                changes,
            }
        })
    }

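    // Applies a `Diff` produced by `diff` above: equal runs advance the offset, deletions
    // remove `len` bytes, and insertions splice in the matching slice of `new_text`. The
    // diff is only applied (returning true) if the buffer hasn't changed since
    // `base_version` was captured.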
    pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
        if self.version == diff.base_version {
            self.start_transaction();
            let mut offset = 0;
            for (tag, len) in diff.changes {
                let range = offset..(offset + len);
                match tag {
                    ChangeTag::Equal => offset += len,
                    ChangeTag::Delete => self.edit(Some(range), "", cx),
                    ChangeTag::Insert => {
                        self.edit(Some(offset..offset), &diff.new_text[range], cx);
                        offset += len;
                    }
                }
            }
            self.end_transaction(cx);
            true
        } else {
            false
        }
    }

    pub fn is_dirty(&self) -> bool {
        !self.saved_version.ge(&self.version)
            || self.file.as_ref().map_or(false, |file| file.is_deleted())
    }

    pub fn has_conflict(&self) -> bool {
        !self.saved_version.ge(&self.version)
            && self
                .file
                .as_ref()
                .map_or(false, |file| file.mtime() > self.saved_mtime)
    }

    pub fn subscribe(&mut self) -> Subscription {
        self.text.subscribe()
    }

    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }

    pub(crate) fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.text.start_transaction_at(now)
    }

    pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        self.end_transaction_at(Instant::now(), cx)
    }

    pub(crate) fn end_transaction_at(
        &mut self,
        now: Instant,
        cx: &mut ModelContext<Self>,
    ) -> Option<TransactionId> {
        if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
            let was_dirty = start_version != self.saved_version;
            self.did_edit(&start_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }

    fn update_language_server(&mut self) {
        let language_server = if let Some(language_server) = self.language_server.as_mut() {
            language_server
        } else {
            return;
        };
        let abs_path = self
            .file
            .as_ref()
            .map_or(Path::new("/").to_path_buf(), |file| {
                file.abs_path().unwrap()
            });

        let version = post_inc(&mut language_server.next_version);
        let snapshot = LanguageServerSnapshot {
            buffer_snapshot: self.text.snapshot(),
            version,
            path: Arc::from(abs_path),
        };
        language_server
            .pending_snapshots
            .insert(version, snapshot.clone());
        let _ = language_server
            .latest_snapshot
            .blocking_send(Some(snapshot));
    }

    pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
    where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, false, cx)
    }

    pub fn edit_with_autoindent<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, true, cx)
    }

    pub fn edit_internal<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        autoindent: bool,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        let new_text = new_text.into();

        // Skip invalid ranges and coalesce contiguous ones.
        let mut ranges: Vec<Range<usize>> = Vec::new();
        for range in ranges_iter {
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            if !new_text.is_empty() || !range.is_empty() {
                if let Some(prev_range) = ranges.last_mut() {
                    if prev_range.end >= range.start {
                        prev_range.end = cmp::max(prev_range.end, range.end);
                    } else {
                        ranges.push(range);
                    }
                } else {
                    ranges.push(range);
                }
            }
        }
        if ranges.is_empty() {
            return;
        }

        self.start_transaction();
        self.pending_autoindent.take();
        let autoindent_request = if autoindent && self.language.is_some() {
            let before_edit = self.snapshot();
            let edited = ranges
                .iter()
                .filter_map(|range| {
                    let start = range.start.to_point(self);
                    if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
                        None
                    } else {
                        Some(self.anchor_before(range.start))
                    }
                })
                .collect();
            Some((before_edit, edited))
        } else {
            None
        };

        let first_newline_ix = new_text.find('\n');
        let new_text_len = new_text.len();

        let edit = self.text.edit(ranges.iter().cloned(), new_text);

        if let Some((before_edit, edited)) = autoindent_request {
            let mut inserted = None;
            if let Some(first_newline_ix) = first_newline_ix {
                let mut delta = 0isize;
                inserted = Some(
                    ranges
                        .iter()
                        .map(|range| {
                            let start =
                                (delta + range.start as isize) as usize + first_newline_ix + 1;
                            let end = (delta + range.start as isize) as usize + new_text_len;
                            delta +=
                                (range.end as isize - range.start as isize) + new_text_len as isize;
                            self.anchor_before(start)..self.anchor_after(end)
                        })
                        .collect(),
                );
            }

            self.autoindent_requests.push(Arc::new(AutoindentRequest {
                before_edit,
                edited,
                inserted,
            }));
        }

        self.end_transaction(cx);
        self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
    }

    fn did_edit(
        &mut self,
        old_version: &clock::Global,
        was_dirty: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if self.edits_since::<usize>(old_version).next().is_none() {
            return;
        }

        self.reparse(cx);
        self.update_language_server();

        cx.emit(Event::Edited);
        if !was_dirty {
            cx.emit(Event::Dirtied);
        }
        cx.notify();
    }

    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language.as_ref().and_then(|l| l.grammar.as_ref())
    }

    pub fn apply_ops<I: IntoIterator<Item = Operation>>(
        &mut self,
        ops: I,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.pending_autoindent.take();
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        let mut deferred_ops = Vec::new();
        let buffer_ops = ops
            .into_iter()
            .filter_map(|op| match op {
                Operation::Buffer(op) => Some(op),
                _ => {
                    if self.can_apply_op(&op) {
                        self.apply_op(op, cx);
                    } else {
                        deferred_ops.push(op);
                    }
                    None
                }
            })
            .collect::<Vec<_>>();
        self.text.apply_ops(buffer_ops)?;
        self.flush_deferred_ops(cx);
        self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited as the operations could
        // include a selection update.
        cx.notify();
        Ok(())
    }

    fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
        let mut deferred_ops = Vec::new();
        for op in self.deferred_ops.drain().iter().cloned() {
            if self.can_apply_op(&op) {
                self.apply_op(op, cx);
            } else {
                deferred_ops.push(op);
            }
        }
        self.deferred_ops.insert(deferred_ops);
    }

    fn can_apply_op(&self, operation: &Operation) -> bool {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics { diagnostics, .. } => {
                diagnostics.iter().all(|diagnostic| {
                    self.text.can_resolve(&diagnostic.range.start)
                        && self.text.can_resolve(&diagnostic.range.end)
                })
            }
            Operation::UpdateSelections { selections, .. } => selections
                .iter()
                .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
            Operation::RemoveSelections { .. } => true,
        }
    }

    fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics { diagnostics, .. } => {
                self.apply_diagnostic_update(diagnostics, cx);
            }
            Operation::UpdateSelections {
                replica_id,
                selections,
                lamport_timestamp,
            } => {
                self.remote_selections.insert(replica_id, selections);
                self.text.observe_lamport_timestamp(lamport_timestamp);
            }
            Operation::RemoveSelections {
                replica_id: set_id,
                lamport_timestamp,
            } => {
                self.remote_selections.remove(&set_id);
                self.text.observe_lamport_timestamp(lamport_timestamp);
            }
        }
    }

    fn apply_diagnostic_update(
        &mut self,
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        cx: &mut ModelContext<Self>,
    ) {
        self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
        self.diagnostics_update_count += 1;
        cx.notify();
    }

    #[cfg(not(test))]
    pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
        if let Some(file) = &self.file {
            file.buffer_updated(self.remote_id(), operation, cx.as_mut());
        }
    }

    #[cfg(test)]
    pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
        self.operations.push(operation);
    }

    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
        self.remote_selections.remove(&replica_id);
        cx.notify();
    }

    pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some((transaction_id, operation)) = self.text.undo() {
            self.send_operation(Operation::Buffer(operation), cx);
            self.did_edit(&old_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }

    pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        if let Some((transaction_id, operation)) = self.text.redo() {
            self.send_operation(Operation::Buffer(operation), cx);
            self.did_edit(&old_version, was_dirty, cx);
            Some(transaction_id)
        } else {
            None
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        old_range_count: usize,
        cx: &mut ModelContext<Self>,
    ) where
        T: rand::Rng,
    {
        self.start_transaction();
        self.text.randomly_edit(rng, old_range_count);
        self.end_transaction(cx);
    }
}

impl Entity for Buffer {
    type Event = Event;

    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
        if let Some(file) = self.file.as_ref() {
            file.buffer_removed(self.remote_id(), cx);
        }
    }
}

impl Deref for Buffer {
    type Target = TextBuffer;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

impl BufferSnapshot {
    fn suggest_autoindents<'a>(
        &'a self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
        let mut query_cursor = QueryCursorHandle::new();
        if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
            let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

            // Get the "indentation ranges" that intersect this row range.
            let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
            let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
            query_cursor.set_point_range(
                Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
                    ..Point::new(row_range.end, 0).to_ts_point(),
            );
            let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
            for mat in query_cursor.matches(
                &grammar.indents_query,
                tree.root_node(),
                TextProvider(self.as_rope()),
            ) {
                let mut node_kind = "";
                let mut start: Option<Point> = None;
                let mut end: Option<Point> = None;
                for capture in mat.captures {
                    if Some(capture.index) == indent_capture_ix {
                        node_kind = capture.node.kind();
                        start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                        end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                    } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position()));
                    }
                }

                if let Some((start, end)) = start.zip(end) {
                    if start.row == end.row {
                        continue;
                    }

                    let range = start..end;
                    match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
                        Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
                        Ok(ix) => {
                            let prev_range = &mut indentation_ranges[ix];
                            prev_range.0.end = prev_range.0.end.max(range.end);
                        }
                    }
                }
            }

            let mut prev_row = prev_non_blank_row.unwrap_or(0);
            Some(row_range.map(move |row| {
                let row_start = Point::new(row, self.indent_column_for_line(row));

                let mut indent_from_prev_row = false;
                let mut outdent_to_row = u32::MAX;
                for (range, _node_kind) in &indentation_ranges {
                    if range.start.row >= row {
                        break;
                    }

                    if range.start.row == prev_row && range.end > row_start {
                        indent_from_prev_row = true;
                    }
                    if range.end.row >= prev_row && range.end <= row_start {
                        outdent_to_row = outdent_to_row.min(range.start.row);
                    }
                }

                let suggestion = if outdent_to_row == prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                } else if indent_from_prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: true,
                    }
                } else if outdent_to_row < prev_row {
                    IndentSuggestion {
                        basis_row: outdent_to_row,
                        indent: false,
                    }
                } else {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                };

                prev_row = row;
                suggestion
            }))
        } else {
            None
        }
    }

    fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
        while row > 0 {
            row -= 1;
            if !self.is_line_blank(row) {
                return Some(row);
            }
        }
        None
    }

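    // Usage sketch (illustrative): iterate the whole buffer as highlighted chunks.
    //
    //     for chunk in snapshot.chunks(0..snapshot.len(), Some(&theme)) {
    //         render(chunk.text, chunk.highlight_style, chunk.diagnostic);
    //     }
    //
    // `theme` and `render` are assumed to be provided by the caller.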
    pub fn chunks<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
        theme: Option<&'a SyntaxTheme>,
    ) -> BufferChunks<'a> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);

        let mut highlights = None;
        let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
        if let Some(theme) = theme {
            for entry in self
                .diagnostics
                .range::<_, usize>(range.clone(), self, true)
            {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                });
            }
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));

            if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
                let mut query_cursor = QueryCursorHandle::new();

                // TODO - add a Tree-sitter API to remove the need for this.
                let cursor = unsafe {
                    std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
                };
                let captures = cursor.set_byte_range(range.clone()).captures(
                    &grammar.highlights_query,
                    tree.root_node(),
                    TextProvider(self.text.as_rope()),
                );
                highlights = Some(BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_map: grammar.highlight_map(),
                    _query_cursor: query_cursor,
                    theme,
                })
            }
        }

        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
        let chunks = self.text.as_rope().chunks_in_range(range.clone());

        BufferChunks {
            range,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            highlights,
        }
    }

    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language
            .as_ref()
            .and_then(|language| language.grammar.as_ref())
    }

    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
        if let Some(tree) = self.tree.as_ref() {
            let root = tree.root_node();
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            let mut node = root.descendant_for_byte_range(range.start, range.end);
            while node.map_or(false, |n| n.byte_range() == range) {
                node = node.unwrap().parent();
            }
            node.map(|n| n.byte_range())
        } else {
            None
        }
    }

    pub fn enclosing_bracket_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<usize>, Range<usize>)> {
        let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;

        // Find bracket pairs that *inclusively* contain the given range.
        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
        let mut cursor = QueryCursorHandle::new();
        let matches = cursor.set_byte_range(range).matches(
            &grammar.brackets_query,
            tree.root_node(),
            TextProvider(self.as_rope()),
        );

        // Get the ranges of the innermost pair of brackets.
        matches
            .filter_map(|mat| {
                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
                Some((open.byte_range(), close.byte_range()))
            })
            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
    }

    pub fn remote_selections_in_range<'a, I, O>(
        &'a self,
        range: Range<I>,
    ) -> impl 'a + Iterator<Item = (ReplicaId, impl 'a + Iterator<Item = Selection<O>>)>
    where
        I: ToOffset,
        O: TextDimension,
    {
        let range = self.anchor_before(range.start)..self.anchor_after(range.end);
        self.remote_selections
            .iter()
            .map(move |(replica_id, selections)| {
                let start_ix = match selections
                    .binary_search_by(|probe| probe.end.cmp(&range.start, self).unwrap())
                {
                    Ok(ix) | Err(ix) => ix,
                };
                let end_ix = match selections
                    .binary_search_by(|probe| probe.start.cmp(&range.end, self).unwrap())
                {
                    Ok(ix) | Err(ix) => ix,
                };

                let selections = &selections[start_ix..end_ix];
                let mut summaries =
                    self.summaries_for_anchors(selections.iter().flat_map(|s| [&s.start, &s.end]));
                let resolved = selections.iter().map(move |s| Selection {
                    id: s.id,
                    start: summaries.next().unwrap(),
                    end: summaries.next().unwrap(),
                    reversed: s.reversed,
                    goal: s.goal,
                });
                (*replica_id, resolved)
            })
    }

    pub fn diagnostics_in_range<'a, T, O>(
        &'a self,
        search_range: Range<T>,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        T: 'a + ToOffset,
        O: 'a + FromAnchor,
    {
        self.diagnostics.range(search_range, self, true)
    }

    pub fn diagnostic_group<'a, O>(
        &'a self,
        group_id: usize,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics.group(group_id, self)
    }

    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    pub fn parse_count(&self) -> usize {
        self.parse_count
    }
}

impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            tree: self.tree.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.is_parsing,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }
}

impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        ByteChunks(self.0.chunks_in_range(node.byte_range()))
    }
}

struct ByteChunks<'a>(rope::Chunks<'a>);

impl<'a> Iterator for ByteChunks<'a> {
    type Item = &'a [u8];

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(str::as_bytes)
    }
}

unsafe impl<'a> Send for BufferChunks<'a> {}

impl<'a> BufferChunks<'a> {
    pub fn seek(&mut self, offset: usize) {
        self.range.start = offset;
        self.chunks.seek(self.range.start);
        if let Some(highlights) = self.highlights.as_mut() {
            highlights
                .stack
                .retain(|(end_offset, _)| *end_offset > offset);
            if let Some((mat, capture_ix)) = &highlights.next_capture {
                let capture = mat.captures[*capture_ix as usize];
                if offset >= capture.node.start_byte() {
                    let next_capture_end = capture.node.end_byte();
                    if offset < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_map.get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            }
            highlights.captures.set_byte_range(self.range.clone());
        }
    }

    pub fn offset(&self) -> usize {
        self.range.start
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }
    }

    fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
                let capture = mat.captures[*capture_ix as usize];
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id = highlights.highlight_map.get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
            if endpoint.offset <= self.range.start {
                self.update_diagnostic_depths(endpoint);
                self.diagnostic_endpoints.next();
            } else {
                next_diagnostic_endpoint = endpoint.offset;
                break;
            }
        }

        if let Some(chunk) = self.chunks.peek() {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_style = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
                    highlight_style = parent_highlight_id.style(highlights.theme);
                }
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                highlight_style,
                diagnostic: self.current_diagnostic_severity(),
            })
        } else {
            None
        }
    }
}

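// `QueryCursorHandle` checks a `QueryCursor` out of the global pool (creating one if the
// pool is empty) and returns it, with its byte and point ranges reset, when dropped.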
impl QueryCursorHandle {
    fn new() -> Self {
        QueryCursorHandle(Some(
            QUERY_CURSORS
                .lock()
                .pop()
                .unwrap_or_else(|| QueryCursor::new()),
        ))
    }
}

impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        self.0.as_ref().unwrap()
    }
}

impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.0.as_mut().unwrap()
    }
}

impl Drop for QueryCursorHandle {
    fn drop(&mut self) {
        let mut cursor = self.0.take().unwrap();
        cursor.set_byte_range(0..usize::MAX);
        cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
        QUERY_CURSORS.lock().push(cursor)
    }
}

trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}

impl ToTreeSitterPoint for Point {
    fn to_ts_point(self) -> tree_sitter::Point {
        tree_sitter::Point::new(self.row as usize, self.column as usize)
    }

    fn from_ts_point(point: tree_sitter::Point) -> Self {
        Point::new(point.row as u32, point.column as u32)
    }
}

trait ToPointUtf16 {
    fn to_point_utf16(self) -> PointUtf16;
}

impl ToPointUtf16 for lsp::Position {
    fn to_point_utf16(self) -> PointUtf16 {
        PointUtf16::new(self.line, self.character)
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::RemoveSelections {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

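// Yields every range associated with a diagnostic that falls in this file: the ranges of
// its related information entries whose location matches `abs_path`, followed by the
// diagnostic's own range.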
fn diagnostic_ranges<'a>(
    diagnostic: &'a lsp::Diagnostic,
    abs_path: Option<&'a Path>,
) -> impl 'a + Iterator<Item = Range<PointUtf16>> {
    diagnostic
        .related_information
        .iter()
        .flatten()
        .filter_map(move |info| {
            if info.location.uri.to_file_path().ok()? == abs_path? {
                let info_start = PointUtf16::new(
                    info.location.range.start.line,
                    info.location.range.start.character,
                );
                let info_end = PointUtf16::new(
                    info.location.range.end.line,
                    info.location.range.end.character,
                );
                Some(info_start..info_end)
            } else {
                None
            }
        })
        .chain(Some(
            diagnostic.range.start.to_point_utf16()..diagnostic.range.end.to_point_utf16(),
        ))
}

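/// Groups an iterator of row numbers into ranges of consecutive rows, capping each range
/// at `max_len` rows.
///
/// For example (illustrative): `contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 2)` yields
/// `1..3`, `3..4`, and `5..7`.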
pub fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values.into_iter();
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            return current_range.take();
        }
    })
}