1use crate::diagnostic_set::{DiagnosticEntry, DiagnosticGroup};
2pub use crate::{
3 diagnostic_set::DiagnosticSet,
4 highlight_map::{HighlightId, HighlightMap},
5 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, LanguageServerConfig,
6 PLAIN_TEXT,
7};
8use anyhow::{anyhow, Result};
9use clock::ReplicaId;
10use collections::hash_map;
11use futures::FutureExt as _;
12use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, MutableAppContext, Task};
13use lazy_static::lazy_static;
14use lsp::LanguageServer;
15use parking_lot::Mutex;
16use postage::{prelude::Stream, sink::Sink, watch};
17use similar::{ChangeTag, TextDiff};
18use smol::future::yield_now;
19use std::{
20 any::Any,
21 cell::RefCell,
22 cmp::{self, Ordering},
23 collections::{BTreeMap, HashMap, HashSet},
24 ffi::OsString,
25 future::Future,
26 iter::{Iterator, Peekable},
27 ops::{Deref, DerefMut, Range},
28 path::{Path, PathBuf},
29 str,
30 sync::Arc,
31 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
32 vec,
33};
34use sum_tree::TreeMap;
35use text::operation_queue::OperationQueue;
36pub use text::{Buffer as TextBuffer, Operation as _, *};
37use theme::SyntaxTheme;
38use tree_sitter::{InputEdit, Parser, QueryCursor, Tree};
39use util::{post_inc, TryFutureExt as _};
40
41#[cfg(any(test, feature = "test-support"))]
42pub use tree_sitter_rust;
43
44pub use lsp::DiagnosticSeverity;
45
thread_local! {
    // One tree-sitter parser per thread: `Parser` is stateful and not `Sync`,
    // and reusing it avoids reconstructing parser state on every reparse.
    static PARSER: RefCell<Parser> = RefCell::new(Parser::new());
}
49
lazy_static! {
    // Shared pool of reusable tree-sitter query cursors. Checked out via
    // `QueryCursorHandle` (presumably returned to the pool on drop — the
    // Drop impl is outside this view; confirm).
    static ref QUERY_CURSORS: Mutex<Vec<QueryCursor>> = Default::default();
}
53
// TODO - Make this configurable
/// Number of space columns added or removed per indentation level by autoindent.
const INDENT_SIZE: u32 = 4;
56
/// A text buffer plus the language-aware state derived from it: syntax tree,
/// diagnostics, remote collaborators' selections, autoindent bookkeeping, and
/// an optional connection to a language server.
pub struct Buffer {
    text: TextBuffer,
    /// The file backing this buffer on disk, if any.
    file: Option<Box<dyn File>>,
    /// Vector-clock version of the buffer contents at the last save.
    saved_version: clock::Global,
    /// Modification time of the backing file at the last save.
    saved_mtime: SystemTime,
    language: Option<Arc<Language>>,
    /// Pending requests to recompute indentation after edits.
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    /// In-flight background autoindent computation, if any.
    pending_autoindent: Option<Task<()>>,
    /// How long `reparse` blocks before deferring to a background task.
    sync_parse_timeout: Duration,
    /// Most recent syntax tree; interpolated lazily against newer edits.
    syntax_tree: Mutex<Option<SyntaxTree>>,
    parsing_in_background: bool,
    /// Incremented every time a parse completes; observers use it to detect reparses.
    parse_count: usize,
    /// Other replicas' selections, keyed by replica id.
    remote_selections: TreeMap<ReplicaId, Arc<[Selection<Anchor>]>>,
    diagnostics: DiagnosticSet,
    /// Incremented on every diagnostics change; observers use it to detect updates.
    diagnostics_update_count: usize,
    /// Timer that eventually removes invalidated disk-based diagnostics
    /// (see `update_diagnostics`).
    clear_invalid_diagnostics_task: Option<Task<()>>,
    next_diagnostic_group_id: usize,
    language_server: Option<LanguageServerState>,
    deferred_ops: OperationQueue<Operation>,
    #[cfg(test)]
    pub(crate) operations: Vec<Operation>,
}
79
/// An immutable snapshot of a [`Buffer`]: its text together with the syntax
/// tree, diagnostics, and remote selections captured at snapshot time.
pub struct BufferSnapshot {
    text: text::BufferSnapshot,
    tree: Option<Tree>,
    diagnostics: DiagnosticSet,
    remote_selections: TreeMap<ReplicaId, Arc<[Selection<Anchor>]>>,
    /// Value of the buffer's diagnostics counter when this snapshot was taken.
    diagnostics_update_count: usize,
    /// Whether a background parse was in flight when this snapshot was taken.
    is_parsing: bool,
    language: Option<Arc<Language>>,
    /// Value of the buffer's parse counter when this snapshot was taken.
    parse_count: usize,
}

/// A single diagnostic (error, warning, etc.) attached to a buffer range.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Diagnostic {
    /// Tool that produced the diagnostic (e.g. a compiler name), if reported.
    pub source: Option<String>,
    /// Machine-readable diagnostic code, if reported.
    pub code: Option<String>,
    pub severity: DiagnosticSeverity,
    pub message: String,
    /// Id shared by all diagnostics in the same group; the group contains one
    /// primary diagnostic plus related information entries.
    pub group_id: usize,
    /// False once the diagnostic has been invalidated by newer information
    /// (stale disk-based diagnostics are kept briefly with this flag cleared).
    pub is_valid: bool,
    /// Whether this entry is the group's primary diagnostic.
    pub is_primary: bool,
}
101
/// Connection state between a buffer and its language server.
struct LanguageServerState {
    server: Arc<LanguageServer>,
    /// Channel on which the buffer publishes its latest snapshot; a background
    /// task forwards the resulting edits to the server (see `set_language`).
    latest_snapshot: watch::Sender<Option<LanguageServerSnapshot>>,
    /// Snapshots already sent to the server, keyed by version, retained until
    /// the server reports diagnostics against them (see `update_diagnostics`).
    pending_snapshots: BTreeMap<usize, LanguageServerSnapshot>,
    /// Version number to assign to the next snapshot sent to the server.
    next_version: usize,
    /// Keeps the snapshot-forwarding task alive for the lifetime of this state.
    _maintain_server: Task<Option<()>>,
}

/// A buffer snapshot tagged with the version and path used for LSP messages.
#[derive(Clone)]
struct LanguageServerSnapshot {
    buffer_snapshot: text::BufferSnapshot,
    /// Monotonic document version reported to the language server.
    version: usize,
    path: Arc<Path>,
}

/// An operation that can be replicated to other collaborators on this buffer.
#[derive(Clone, Debug)]
pub enum Operation {
    /// A plain text edit/undo operation from the underlying CRDT.
    Buffer(text::Operation),
    /// Replaces the full diagnostic set.
    UpdateDiagnostics {
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
    /// Replaces one replica's selections.
    UpdateSelections {
        replica_id: ReplicaId,
        selections: Arc<[Selection<Anchor>]>,
        lamport_timestamp: clock::Lamport,
    },
    /// Removes one replica's selections entirely.
    RemoveSelections {
        replica_id: ReplicaId,
        lamport_timestamp: clock::Lamport,
    },
}

/// Events emitted by a [`Buffer`] for its observers.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Event {
    Edited,
    /// The buffer became dirty (diverged from its saved state).
    Dirtied,
    Saved,
    /// The buffer's file handle changed (path, deletion, or mtime).
    FileHandleChanged,
    /// The buffer's contents were replaced from disk.
    Reloaded,
    Reparsed,
    DiagnosticsUpdated,
    Closed,
}
146
/// Abstraction over the file backing a buffer, letting the buffer stay
/// agnostic of the worktree/storage implementation.
pub trait File {
    /// Id of the worktree containing this file.
    fn worktree_id(&self) -> usize;

    /// Id of this file's entry within its worktree, if it still exists there.
    fn entry_id(&self) -> Option<usize>;

    /// Last known modification time of the file.
    fn mtime(&self) -> SystemTime;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<Path>;

    /// Returns the absolute path of this file.
    fn abs_path(&self) -> Option<PathBuf>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self) -> PathBuf;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name(&self) -> Option<OsString>;

    /// Whether the file has been deleted from disk.
    fn is_deleted(&self) -> bool;

    /// Persists `text` for buffer `buffer_id`, resolving with the saved
    /// version and the resulting file mtime.
    fn save(
        &self,
        buffer_id: u64,
        text: Rope,
        version: clock::Global,
        cx: &mut MutableAppContext,
    ) -> Task<Result<(clock::Global, SystemTime)>>;

    /// Loads the file's current contents from local storage, if possible.
    fn load_local(&self, cx: &AppContext) -> Option<Task<Result<String>>>;

    /// Notifies the file's owner that the buffer produced `operation`
    /// (e.g. so it can be forwarded to collaborators).
    fn buffer_updated(&self, buffer_id: u64, operation: Operation, cx: &mut MutableAppContext);

    /// Notifies the file's owner that the buffer was closed.
    fn buffer_removed(&self, buffer_id: u64, cx: &mut MutableAppContext);

    fn boxed_clone(&self) -> Box<dyn File>;

    fn as_any(&self) -> &dyn Any;
}
188
/// RAII checkout of a `QueryCursor` from the shared `QUERY_CURSORS` pool.
struct QueryCursorHandle(Option<QueryCursor>);

/// A parsed syntax tree paired with the buffer version it reflects.
#[derive(Clone)]
struct SyntaxTree {
    tree: Tree,
    version: clock::Global,
}

/// A request to recompute indentation for an edit, capturing the pre-edit
/// snapshot plus anchors for the edited and newly inserted ranges.
#[derive(Clone)]
struct AutoindentRequest {
    before_edit: BufferSnapshot,
    edited: Vec<Anchor>,
    inserted: Option<Vec<Range<Anchor>>>,
}

/// A suggested indent for one row: copy the indent of `basis_row`, optionally
/// increased by one level when `indent` is true.
#[derive(Debug)]
struct IndentSuggestion {
    basis_row: u32,
    indent: bool,
}

/// Adapts a `Rope` to the text-provider interface tree-sitter queries expect.
struct TextProvider<'a>(&'a Rope);

/// Iterator state for syntax highlighting within `BufferChunks`.
struct BufferChunkHighlights<'a> {
    captures: tree_sitter::QueryCaptures<'a, 'a, TextProvider<'a>>,
    next_capture: Option<(tree_sitter::QueryMatch<'a, 'a>, usize)>,
    /// Stack of (end offset, highlight id) for currently open captures.
    stack: Vec<(usize, HighlightId)>,
    highlight_map: HighlightMap,
    theme: &'a SyntaxTheme,
    _query_cursor: QueryCursorHandle,
}

/// Iterator over a buffer range yielding text chunks annotated with highlight
/// styles and diagnostic severities.
pub struct BufferChunks<'a> {
    range: Range<usize>,
    chunks: rope::Chunks<'a>,
    diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
    // Nesting depth of currently open diagnostics, per severity.
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    highlights: Option<BufferChunkHighlights<'a>>,
}

/// One chunk of buffer text with its display annotations.
#[derive(Clone, Copy, Debug, Default)]
pub struct Chunk<'a> {
    pub text: &'a str,
    pub highlight_style: Option<HighlightStyle>,
    pub diagnostic: Option<DiagnosticSeverity>,
}

/// A line-based diff against a buffer at `base_version`, as (tag, byte length)
/// runs over `new_text` (see `Buffer::diff` / `Buffer::apply_diff`).
pub(crate) struct Diff {
    base_version: clock::Global,
    new_text: Arc<str>,
    changes: Vec<(ChangeTag, usize)>,
}

/// Start or end boundary of a diagnostic's range, used when interleaving
/// diagnostics with text chunks.
#[derive(Clone, Copy)]
struct DiagnosticEndpoint {
    offset: usize,
    is_start: bool,
    severity: DiagnosticSeverity,
}
251
252impl Buffer {
253 pub fn new<T: Into<Arc<str>>>(
254 replica_id: ReplicaId,
255 base_text: T,
256 cx: &mut ModelContext<Self>,
257 ) -> Self {
258 Self::build(
259 TextBuffer::new(
260 replica_id,
261 cx.model_id() as u64,
262 History::new(base_text.into()),
263 ),
264 None,
265 )
266 }
267
268 pub fn from_file<T: Into<Arc<str>>>(
269 replica_id: ReplicaId,
270 base_text: T,
271 file: Box<dyn File>,
272 cx: &mut ModelContext<Self>,
273 ) -> Self {
274 Self::build(
275 TextBuffer::new(
276 replica_id,
277 cx.model_id() as u64,
278 History::new(base_text.into()),
279 ),
280 Some(file),
281 )
282 }
283
284 pub fn from_proto(
285 replica_id: ReplicaId,
286 message: proto::Buffer,
287 file: Option<Box<dyn File>>,
288 cx: &mut ModelContext<Self>,
289 ) -> Result<Self> {
290 let mut buffer =
291 text::Buffer::new(replica_id, message.id, History::new(message.content.into()));
292 let ops = message
293 .history
294 .into_iter()
295 .map(|op| text::Operation::Edit(proto::deserialize_edit_operation(op)));
296 buffer.apply_ops(ops)?;
297 let mut this = Self::build(buffer, file);
298 for selection_set in message.selections {
299 this.remote_selections.insert(
300 selection_set.replica_id as ReplicaId,
301 proto::deserialize_selections(selection_set.selections),
302 );
303 }
304 this.apply_diagnostic_update(
305 Arc::from(proto::deserialize_diagnostics(message.diagnostics)),
306 cx,
307 );
308
309 Ok(this)
310 }
311
312 pub fn to_proto(&self) -> proto::Buffer {
313 proto::Buffer {
314 id: self.remote_id(),
315 content: self.text.base_text().to_string(),
316 history: self
317 .text
318 .history()
319 .map(proto::serialize_edit_operation)
320 .collect(),
321 selections: self
322 .remote_selections
323 .iter()
324 .map(|(replica_id, selections)| proto::SelectionSet {
325 replica_id: *replica_id as u32,
326 selections: proto::serialize_selections(selections),
327 })
328 .collect(),
329 diagnostics: proto::serialize_diagnostics(self.diagnostics.iter()),
330 }
331 }
332
    /// Builder-style wrapper around `set_language`, for use at construction time.
    pub fn with_language(
        mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) -> Self {
        self.set_language(language, language_server, cx);
        self
    }
342
343 fn build(buffer: TextBuffer, file: Option<Box<dyn File>>) -> Self {
344 let saved_mtime;
345 if let Some(file) = file.as_ref() {
346 saved_mtime = file.mtime();
347 } else {
348 saved_mtime = UNIX_EPOCH;
349 }
350
351 Self {
352 saved_mtime,
353 saved_version: buffer.version(),
354 text: buffer,
355 file,
356 syntax_tree: Mutex::new(None),
357 parsing_in_background: false,
358 parse_count: 0,
359 sync_parse_timeout: Duration::from_millis(1),
360 autoindent_requests: Default::default(),
361 pending_autoindent: Default::default(),
362 language: None,
363 remote_selections: Default::default(),
364 diagnostics: Default::default(),
365 diagnostics_update_count: 0,
366 next_diagnostic_group_id: 0,
367 clear_invalid_diagnostics_task: None,
368 language_server: None,
369 deferred_ops: OperationQueue::new(),
370 #[cfg(test)]
371 operations: Default::default(),
372 }
373 }
374
    /// Captures an immutable snapshot of the buffer's text along with its
    /// current syntax tree, diagnostics, remote selections, and counters.
    pub fn snapshot(&self) -> BufferSnapshot {
        BufferSnapshot {
            text: self.text.snapshot(),
            // `syntax_tree()` interpolates the stored tree up to the current
            // version before cloning it.
            tree: self.syntax_tree(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            diagnostics_update_count: self.diagnostics_update_count,
            is_parsing: self.parsing_in_background,
            language: self.language.clone(),
            parse_count: self.parse_count,
        }
    }
387
    /// The file backing this buffer, if any.
    pub fn file(&self) -> Option<&dyn File> {
        self.file.as_deref()
    }
391
    /// Saves the buffer's contents via its backing file.
    ///
    /// Returns an error immediately if the buffer has no file. Otherwise
    /// returns a task that resolves with the saved version and file mtime;
    /// on completion the buffer records the save via `did_save` (which also
    /// emits `Event::Saved`).
    pub fn save(
        &mut self,
        cx: &mut ModelContext<Self>,
    ) -> Result<Task<Result<(clock::Global, SystemTime)>>> {
        let file = self
            .file
            .as_ref()
            .ok_or_else(|| anyhow!("buffer has no file"))?;
        // Capture the text and version now, so the save reflects the buffer
        // state at the moment `save` was called.
        let text = self.as_rope().clone();
        let version = self.version();
        let save = file.save(self.remote_id(), text, version, cx.as_mut());
        Ok(cx.spawn(|this, mut cx| async move {
            let (version, mtime) = save.await?;
            this.update(&mut cx, |this, cx| {
                this.did_save(version.clone(), mtime, None, cx);
            });
            Ok((version, mtime))
        }))
    }
411
    /// Assigns this buffer's language and optionally connects it to a language
    /// server, then reparses and pushes an initial snapshot to the server.
    ///
    /// When a server is provided, a background task is spawned that watches
    /// the `latest_snapshot` channel (fed by `update_language_server`) and
    /// forwards document state to the server: `textDocument/didOpen` for the
    /// first snapshot, then incremental `textDocument/didChange` for each
    /// subsequent one.
    pub fn set_language(
        &mut self,
        language: Option<Arc<Language>>,
        language_server: Option<Arc<lsp::LanguageServer>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.language = language;
        self.language_server = if let Some(server) = language_server {
            let (latest_snapshot_tx, mut latest_snapshot_rx) = watch::channel();
            Some(LanguageServerState {
                latest_snapshot: latest_snapshot_tx,
                pending_snapshots: Default::default(),
                next_version: 0,
                server: server.clone(),
                _maintain_server: cx.background().spawn(
                    async move {
                        let mut prev_snapshot: Option<LanguageServerSnapshot> = None;
                        while let Some(snapshot) = latest_snapshot_rx.recv().await {
                            if let Some(snapshot) = snapshot {
                                // NOTE(review): assumes the snapshot path is
                                // absolute and valid as a file URL — confirm.
                                let uri = lsp::Url::from_file_path(&snapshot.path).unwrap();
                                if let Some(prev_snapshot) = prev_snapshot {
                                    // Document already open: translate the edits
                                    // since the previous snapshot into LSP
                                    // content-change events.
                                    let changes = lsp::DidChangeTextDocumentParams {
                                        text_document: lsp::VersionedTextDocumentIdentifier::new(
                                            uri,
                                            snapshot.version as i32,
                                        ),
                                        content_changes: snapshot
                                            .buffer_snapshot
                                            .edits_since::<(PointUtf16, usize)>(
                                                prev_snapshot.buffer_snapshot.version(),
                                            )
                                            .map(|edit| {
                                                // The replaced range is expressed in
                                                // the coordinates of the new text:
                                                // it starts at the edit's new start
                                                // and spans the old range's extent.
                                                let edit_start = edit.new.start.0;
                                                let edit_end = edit_start
                                                    + (edit.old.end.0 - edit.old.start.0);
                                                let new_text = snapshot
                                                    .buffer_snapshot
                                                    .text_for_range(
                                                        edit.new.start.1..edit.new.end.1,
                                                    )
                                                    .collect();
                                                lsp::TextDocumentContentChangeEvent {
                                                    range: Some(lsp::Range::new(
                                                        lsp::Position::new(
                                                            edit_start.row,
                                                            edit_start.column,
                                                        ),
                                                        lsp::Position::new(
                                                            edit_end.row,
                                                            edit_end.column,
                                                        ),
                                                    )),
                                                    range_length: None,
                                                    text: new_text,
                                                }
                                            })
                                            .collect(),
                                    };
                                    server
                                        .notify::<lsp::notification::DidChangeTextDocument>(changes)
                                        .await?;
                                } else {
                                    // First snapshot: open the document with its
                                    // full text.
                                    server
                                        .notify::<lsp::notification::DidOpenTextDocument>(
                                            lsp::DidOpenTextDocumentParams {
                                                text_document: lsp::TextDocumentItem::new(
                                                    uri,
                                                    Default::default(),
                                                    snapshot.version as i32,
                                                    snapshot.buffer_snapshot.text().to_string(),
                                                ),
                                            },
                                        )
                                        .await?;
                                }

                                prev_snapshot = Some(snapshot);
                            }
                        }
                        Ok(())
                    }
                    .log_err(),
                ),
            })
        } else {
            None
        };

        // Reparse with the new grammar and send the current state to the server.
        self.reparse(cx);
        self.update_language_server();
    }
503
    /// Records a completed save: updates the saved version/mtime, optionally
    /// swaps in a new file handle, notifies any language server with
    /// `textDocument/didSave`, and emits `Event::Saved`.
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: SystemTime,
        new_file: Option<Box<dyn File>>,
        cx: &mut ModelContext<Self>,
    ) {
        self.saved_mtime = mtime;
        self.saved_version = version;
        if let Some(new_file) = new_file {
            self.file = Some(new_file);
        }
        if let Some(state) = &self.language_server {
            cx.background()
                .spawn(
                    state
                        .server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: lsp::TextDocumentIdentifier {
                                    // NOTE(review): unwraps assume a buffer with a
                                    // language server always has a file with an
                                    // absolute path — confirm against callers.
                                    uri: lsp::Url::from_file_path(
                                        self.file.as_ref().unwrap().abs_path().unwrap(),
                                    )
                                    .unwrap(),
                                },
                                text: None,
                            },
                        ),
                )
                .detach()
        }
        cx.emit(Event::Saved);
    }
537
    /// Updates this buffer's file handle after a change was observed on disk.
    ///
    /// Emits `Event::FileHandleChanged` when the path, deletion state, or
    /// mtime changed, and `Event::Dirtied` when the file was newly deleted
    /// while the buffer was clean. When the file's contents changed and the
    /// buffer has no unsaved edits, returns a task that reloads the buffer
    /// from disk (emitting `Event::Reloaded` on success). Returns `None` when
    /// the buffer has no file or no reload is needed.
    pub fn file_updated(
        &mut self,
        new_file: Box<dyn File>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<()>> {
        let old_file = self.file.as_ref()?;
        let mut file_changed = false;
        let mut task = None;

        if new_file.path() != old_file.path() {
            file_changed = true;
        }

        if new_file.is_deleted() {
            if !old_file.is_deleted() {
                file_changed = true;
                // Deleting the backing file makes a clean buffer dirty
                // (see `is_dirty`), so notify observers.
                if !self.is_dirty() {
                    cx.emit(Event::Dirtied);
                }
            }
        } else {
            let new_mtime = new_file.mtime();
            if new_mtime != old_file.mtime() {
                file_changed = true;

                // Only auto-reload when there are no unsaved edits to lose.
                if !self.is_dirty() {
                    task = Some(cx.spawn(|this, mut cx| {
                        async move {
                            let new_text = this.read_with(&cx, |this, cx| {
                                this.file.as_ref().and_then(|file| file.load_local(cx))
                            });
                            if let Some(new_text) = new_text {
                                let new_text = new_text.await?;
                                // Diff on a background thread, then apply the
                                // result only if the buffer hasn't changed since.
                                let diff = this
                                    .read_with(&cx, |this, cx| this.diff(new_text.into(), cx))
                                    .await;
                                this.update(&mut cx, |this, cx| {
                                    if this.apply_diff(diff, cx) {
                                        this.saved_version = this.version();
                                        this.saved_mtime = new_mtime;
                                        cx.emit(Event::Reloaded);
                                    }
                                });
                            }
                            Ok(())
                        }
                        .log_err()
                        .map(drop)
                    }));
                }
            }
        }

        if file_changed {
            cx.emit(Event::FileHandleChanged);
        }
        self.file = Some(new_file);
        task
    }
597
    /// Notifies observers that this buffer is being closed.
    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }

    /// The language currently assigned to this buffer, if any.
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    /// Number of completed parses; changes whenever the syntax tree is rebuilt.
    pub fn parse_count(&self) -> usize {
        self.parse_count
    }

    /// Number of diagnostics updates; changes whenever diagnostics are replaced.
    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }
613
614 pub(crate) fn syntax_tree(&self) -> Option<Tree> {
615 if let Some(syntax_tree) = self.syntax_tree.lock().as_mut() {
616 self.interpolate_tree(syntax_tree);
617 Some(syntax_tree.tree.clone())
618 } else {
619 None
620 }
621 }
622
    /// Whether a background parse is currently in flight (test-support only).
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.parsing_in_background
    }

    /// Overrides how long `reparse` blocks before going async (tests only).
    #[cfg(test)]
    pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
        self.sync_parse_timeout = timeout;
    }
632
    /// Reparses the buffer with its current grammar.
    ///
    /// Blocks for up to `sync_parse_timeout`; if parsing takes longer it
    /// continues on a background task. Returns whether a parse completed
    /// synchronously.
    fn reparse(&mut self, cx: &mut ModelContext<Self>) -> bool {
        // Only one background parse runs at a time; the completion handler
        // below triggers another reparse if the buffer changed meanwhile.
        if self.parsing_in_background {
            return false;
        }

        if let Some(grammar) = self.grammar().cloned() {
            // Capture the old tree (for incremental parsing), the text, and
            // the version the resulting tree will correspond to.
            let old_tree = self.syntax_tree();
            let text = self.as_rope().clone();
            let parsed_version = self.version();
            let parse_task = cx.background().spawn({
                let grammar = grammar.clone();
                async move { Self::parse_text(&text, old_tree, &grammar) }
            });

            match cx
                .background()
                .block_with_timeout(self.sync_parse_timeout, parse_task)
            {
                Ok(new_tree) => {
                    self.did_finish_parsing(new_tree, parsed_version, cx);
                    return true;
                }
                Err(parse_task) => {
                    // Timed out: finish the parse asynchronously.
                    self.parsing_in_background = true;
                    cx.spawn(move |this, mut cx| async move {
                        let new_tree = parse_task.await;
                        this.update(&mut cx, move |this, cx| {
                            // The result may be stale if the buffer was edited
                            // or its grammar swapped while parsing; if so,
                            // schedule another parse after recording this one.
                            let grammar_changed = this
                                .grammar()
                                .map_or(true, |curr_grammar| !Arc::ptr_eq(&grammar, curr_grammar));
                            let parse_again = this.version.gt(&parsed_version) || grammar_changed;
                            this.parsing_in_background = false;
                            this.did_finish_parsing(new_tree, parsed_version, cx);

                            if parse_again && this.reparse(cx) {
                                return;
                            }
                        });
                    })
                    .detach();
                }
            }
        }
        false
    }
678
679 fn parse_text(text: &Rope, old_tree: Option<Tree>, grammar: &Grammar) -> Tree {
680 PARSER.with(|parser| {
681 let mut parser = parser.borrow_mut();
682 parser
683 .set_language(grammar.ts_language)
684 .expect("incompatible grammar");
685 let mut chunks = text.chunks_in_range(0..text.len());
686 let tree = parser
687 .parse_with(
688 &mut move |offset, _| {
689 chunks.seek(offset);
690 chunks.next().unwrap_or("").as_bytes()
691 },
692 old_tree.as_ref(),
693 )
694 .unwrap();
695 tree
696 })
697 }
698
    /// Applies the edits made since `tree.version` to the syntax tree's
    /// source coordinates, so tree-sitter can reuse it for incremental
    /// parsing, then stamps the tree with the current version.
    fn interpolate_tree(&self, tree: &mut SyntaxTree) {
        for edit in self.edits_since::<(usize, Point)>(&tree.version) {
            let (bytes, lines) = edit.flatten();
            // The "old" coordinates below are expressed relative to the new
            // text's start position, as `InputEdit` expects when edits are
            // applied in document order.
            tree.tree.edit(&InputEdit {
                start_byte: bytes.new.start,
                old_end_byte: bytes.new.start + bytes.old.len(),
                new_end_byte: bytes.new.end,
                start_position: lines.new.start.to_ts_point(),
                old_end_position: (lines.new.start + (lines.old.end - lines.old.start))
                    .to_ts_point(),
                new_end_position: lines.new.end.to_ts_point(),
            });
        }
        tree.version = self.version();
    }
714
    /// Installs a freshly parsed syntax tree (valid as of `version`), kicks
    /// off any pending autoindent work, and notifies observers.
    fn did_finish_parsing(
        &mut self,
        tree: Tree,
        version: clock::Global,
        cx: &mut ModelContext<Self>,
    ) {
        self.parse_count += 1;
        *self.syntax_tree.lock() = Some(SyntaxTree { tree, version });
        // Autoindent suggestions depend on the syntax tree, so recompute now.
        self.request_autoindent(cx);
        cx.emit(Event::Reparsed);
        cx.notify();
    }
727
    /// Replaces the buffer's diagnostics with a new set reported by the
    /// language server, returning the operation to broadcast to replicas.
    ///
    /// `version`, when present, identifies the snapshot (previously sent to
    /// the server) that the diagnostics' positions refer to; it must still be
    /// present in `pending_snapshots` or an error is returned. Disk-based
    /// diagnostics are translated through unsaved edits, and stale ones are
    /// preserved briefly as invalid (see comments below).
    pub fn update_diagnostics(
        &mut self,
        version: Option<i32>,
        mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Operation> {
        // Tie-break for diagnostics sharing a range: primary first, then by
        // source, severity, and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.source.cmp(&b.source))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        // Sort by start ascending, end descending (containing ranges before
        // contained ones), then the tie-break above.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        // Resolve positions against the snapshot the server reported against,
        // or against the current buffer when no version was provided.
        let version = version.map(|version| version as usize);
        let content = if let Some(version) = version {
            // NOTE(review): unwrap assumes a versioned diagnostics update only
            // arrives while a language server is attached — confirm.
            let language_server = self.language_server.as_mut().unwrap();
            // Snapshots older than the acknowledged version are no longer needed.
            language_server
                .pending_snapshots
                .retain(|&v, _| v >= version);
            let snapshot = language_server
                .pending_snapshots
                .get(&version)
                .ok_or_else(|| anyhow!("missing snapshot"))?;
            &snapshot.buffer_snapshot
        } else {
            self.deref()
        };

        let empty_set = HashSet::new();
        let disk_based_sources = self
            .language
            .as_ref()
            .and_then(|language| language.disk_based_diagnostic_sources())
            .unwrap_or(&empty_set);

        let mut edits_since_save = content
            .edits_since::<PointUtf16>(&self.saved_version)
            .peekable();
        let mut last_edit_old_end = PointUtf16::zero();
        let mut last_edit_new_end = PointUtf16::zero();
        let mut has_disk_based_diagnostics = false;
        let mut ix = 0;
        'outer: while ix < diagnostics.len() {
            let entry = &mut diagnostics[ix];
            let mut start = entry.range.start;
            let mut end = entry.range.end;

            // Some diagnostics are based on files on disk instead of buffers'
            // current contents. Adjust these diagnostics' ranges to reflect
            // any unsaved edits.
            if entry
                .diagnostic
                .source
                .as_ref()
                .map_or(false, |source| disk_based_sources.contains(source))
            {
                has_disk_based_diagnostics = true;
                while let Some(edit) = edits_since_save.peek() {
                    if edit.old.end <= start {
                        // Edit is entirely before the diagnostic: remember the
                        // offset shift it introduces and move on.
                        last_edit_old_end = edit.old.end;
                        last_edit_new_end = edit.new.end;
                        edits_since_save.next();
                    } else if edit.old.start <= end && edit.old.end >= start {
                        // Edit overlaps the diagnostic: the on-disk range no
                        // longer corresponds to buffer text, so discard it.
                        diagnostics.remove(ix);
                        continue 'outer;
                    } else {
                        break;
                    }
                }

                // Translate the range by the cumulative shift of prior edits.
                start = last_edit_new_end + (start - last_edit_old_end);
                end = last_edit_new_end + (end - last_edit_old_end);
            }

            entry.range = content.clip_point_utf16(start, Bias::Left)
                ..content.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one character
            if entry.range.start == entry.range.end {
                entry.range.end.column += 1;
                entry.range.end = content.clip_point_utf16(entry.range.end, Bias::Right);
                if entry.range.start == entry.range.end && entry.range.end.column > 0 {
                    entry.range.start.column -= 1;
                    entry.range.start = content.clip_point_utf16(entry.range.start, Bias::Left);
                }
            }
            ix += 1;
        }
        drop(edits_since_save);

        // Merge the sorted old and new diagnostic sequences (both ordered the
        // same way), comparing entries positionally via anchors.
        let mut merged_diagnostics = Vec::with_capacity(diagnostics.len());
        let mut old_diagnostics = self
            .diagnostics
            .iter()
            .map(|entry| {
                (
                    entry,
                    entry
                        .diagnostic
                        .source
                        .as_ref()
                        .map_or(false, |source| disk_based_sources.contains(source)),
                )
            })
            .peekable();
        let mut new_diagnostics = diagnostics
            .into_iter()
            .map(|entry| DiagnosticEntry {
                range: content.anchor_before(entry.range.start)
                    ..content.anchor_after(entry.range.end),
                diagnostic: entry.diagnostic,
            })
            .peekable();

        // Incorporate the *old* diagnostics into the new diagnostics set, in two ways:
        // 1. Recycle group ids - diagnostic groups whose primary diagnostic has not
        //    changed should use the same group id as before, so that downstream code
        //    can determine which diagnostics are new.
        // 2. Preserve disk-based diagnostics - Some diagnostic sources are reported
        //    on a less frequent basis than others. If these sources are absent from this
        //    message, then preserve the previous diagnostics for those sources, but mark
        //    them as invalid, and set a timer to clear them out.
        let mut group_id_replacements = HashMap::new();
        let mut merged_old_disk_based_diagnostics = false;
        loop {
            match (old_diagnostics.peek(), new_diagnostics.peek()) {
                (None, None) => break,
                (None, Some(_)) => {
                    merged_diagnostics.push(new_diagnostics.next().unwrap());
                }
                (Some(_), None) => {
                    let (old_entry, is_disk_based) = old_diagnostics.next().unwrap();
                    if is_disk_based && !has_disk_based_diagnostics {
                        let mut old_entry = old_entry.clone();
                        old_entry.diagnostic.is_valid = false;
                        merged_old_disk_based_diagnostics = true;
                        merged_diagnostics.push(old_entry);
                    }
                }
                (Some((old, _)), Some(new)) => {
                    let ordering = Ordering::Equal
                        .then_with(|| old.range.start.cmp(&new.range.start, content).unwrap())
                        .then_with(|| new.range.end.cmp(&old.range.end, content).unwrap())
                        .then_with(|| compare_diagnostics(&old.diagnostic, &new.diagnostic));
                    match ordering {
                        Ordering::Less => {
                            // Old-only entry: keep it (invalidated) only when it
                            // is disk-based and this update carried none.
                            let (old_entry, is_disk_based) = old_diagnostics.next().unwrap();
                            if is_disk_based && !has_disk_based_diagnostics {
                                let mut old_entry = old_entry.clone();
                                old_entry.diagnostic.is_valid = false;
                                merged_old_disk_based_diagnostics = true;
                                merged_diagnostics.push(old_entry);
                            }
                        }
                        Ordering::Equal => {
                            // Same diagnostic as before: keep the new entry but
                            // remember to reuse the old group id (case 1 above).
                            let (old_entry, _) = old_diagnostics.next().unwrap();
                            let new_entry = new_diagnostics.next().unwrap();
                            if new_entry.diagnostic.is_primary {
                                group_id_replacements.insert(
                                    new_entry.diagnostic.group_id,
                                    old_entry.diagnostic.group_id,
                                );
                            }
                            merged_diagnostics.push(new_entry);
                        }
                        Ordering::Greater => {
                            let new_entry = new_diagnostics.next().unwrap();
                            merged_diagnostics.push(new_entry);
                        }
                    }
                }
            }
        }
        drop(old_diagnostics);

        // Having determined which group ids should be recycled, renumber all of
        // groups. Any new group that does not correspond to an old group receives
        // a brand new group id.
        let mut next_diagnostic_group_id = self.next_diagnostic_group_id;
        for entry in &mut merged_diagnostics {
            if entry.diagnostic.is_valid {
                match group_id_replacements.entry(entry.diagnostic.group_id) {
                    hash_map::Entry::Occupied(e) => entry.diagnostic.group_id = *e.get(),
                    hash_map::Entry::Vacant(e) => {
                        entry.diagnostic.group_id = post_inc(&mut next_diagnostic_group_id);
                        e.insert(entry.diagnostic.group_id);
                    }
                }
            }
        }

        self.diagnostics = DiagnosticSet::from_sorted_entries(merged_diagnostics, content);
        self.next_diagnostic_group_id = next_diagnostic_group_id;

        // If old disk-based diagnostics were included in this new set, then
        // set a timer to remove them if enough time passes before the next
        // diagnostics update.
        if merged_old_disk_based_diagnostics {
            self.clear_invalid_diagnostics_task = Some(cx.spawn(|this, mut cx| async move {
                smol::Timer::after(Duration::from_secs(2)).await;
                this.update(&mut cx, |this, cx| {
                    let content = this.snapshot();
                    this.diagnostics = DiagnosticSet::from_sorted_entries(
                        this.diagnostics
                            .iter()
                            .filter(|d| d.diagnostic.is_valid)
                            .cloned(),
                        &content,
                    );
                    let operation = this.did_update_diagnostics(cx);
                    this.send_operation(operation, cx);
                });
            }));
        } else if has_disk_based_diagnostics {
            // Fresh disk-based diagnostics arrived; cancel any pending cleanup.
            self.clear_invalid_diagnostics_task.take();
        }

        Ok(self.did_update_diagnostics(cx))
    }
955
    /// Records that diagnostics changed, notifies observers, and builds the
    /// operation that replicates the new diagnostic set to other replicas.
    fn did_update_diagnostics(&mut self, cx: &mut ModelContext<Self>) -> Operation {
        self.diagnostics_update_count += 1;
        cx.notify();
        cx.emit(Event::DiagnosticsUpdated);
        Operation::UpdateDiagnostics {
            diagnostics: Arc::from(self.diagnostics.iter().cloned().collect::<Vec<_>>()),
            lamport_timestamp: self.text.lamport_clock.tick(),
        }
    }
965
    /// Computes and applies indentation for all pending autoindent requests.
    ///
    /// Like `reparse`, this briefly blocks (500µs) hoping to finish
    /// synchronously, and otherwise stores the remainder in
    /// `pending_autoindent` to be applied when the computation completes.
    fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(indent_columns) = self.compute_autoindents() {
            let indent_columns = cx.background().spawn(indent_columns);
            match cx
                .background()
                .block_with_timeout(Duration::from_micros(500), indent_columns)
            {
                Ok(indent_columns) => self.apply_autoindents(indent_columns, cx),
                Err(indent_columns) => {
                    self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
                        let indent_columns = indent_columns.await;
                        this.update(&mut cx, |this, cx| {
                            this.apply_autoindents(indent_columns, cx);
                        });
                    }));
                }
            }
        }
    }
985
    /// Builds the future that computes target indent columns (keyed by row in
    /// the current buffer) for all pending autoindent requests.
    ///
    /// Returns `None` when there is nothing to do (no language, no syntax
    /// tree, or no requests). The future yields periodically so long buffers
    /// don't hog the executor.
    fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, u32>>> {
        let max_rows_between_yields = 100;
        let snapshot = self.snapshot();
        if snapshot.language.is_none()
            || snapshot.tree.is_none()
            || self.autoindent_requests.is_empty()
        {
            return None;
        }

        let autoindent_requests = self.autoindent_requests.clone();
        Some(async move {
            let mut indent_columns = BTreeMap::new();
            for request in autoindent_requests {
                // Map each edited row from its position before the edit to its
                // position in the current snapshot, by resolving the same
                // anchor in both.
                let old_to_new_rows = request
                    .edited
                    .iter()
                    .map(|anchor| anchor.summary::<Point>(&request.before_edit).row)
                    .zip(
                        request
                            .edited
                            .iter()
                            .map(|anchor| anchor.summary::<Point>(&snapshot).row),
                    )
                    .collect::<BTreeMap<u32, u32>>();

                // First pass: what indent would each edited line have had in
                // the pre-edit buffer?
                let mut old_suggestions = HashMap::<u32, u32>::default();
                let old_edited_ranges =
                    contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
                for old_edited_range in old_edited_ranges {
                    let suggestions = request
                        .before_edit
                        .suggest_autoindents(old_edited_range.clone())
                        .into_iter()
                        .flatten();
                    for (old_row, suggestion) in old_edited_range.zip(suggestions) {
                        // Prefer a suggestion already computed for the basis
                        // row; otherwise read its indent from the old buffer.
                        let indentation_basis = old_to_new_rows
                            .get(&suggestion.basis_row)
                            .and_then(|from_row| old_suggestions.get(from_row).copied())
                            .unwrap_or_else(|| {
                                request
                                    .before_edit
                                    .indent_column_for_line(suggestion.basis_row)
                            });
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        old_suggestions.insert(
                            *old_to_new_rows.get(&old_row).unwrap(),
                            indentation_basis + delta,
                        );
                    }
                    yield_now().await;
                }

                // At this point, old_suggestions contains the suggested indentation for all edited lines with respect to the state of the
                // buffer before the edit, but keyed by the row for these lines after the edits were applied.

                // Second pass: compute the suggestion against the *current*
                // buffer, and only record rows whose suggestion changed.
                let new_edited_row_ranges =
                    contiguous_ranges(old_to_new_rows.values().copied(), max_rows_between_yields);
                for new_edited_row_range in new_edited_row_ranges {
                    let suggestions = snapshot
                        .suggest_autoindents(new_edited_row_range.clone())
                        .into_iter()
                        .flatten();
                    for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
                        let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                        let new_indentation = indent_columns
                            .get(&suggestion.basis_row)
                            .copied()
                            .unwrap_or_else(|| {
                                snapshot.indent_column_for_line(suggestion.basis_row)
                            })
                            + delta;
                        if old_suggestions
                            .get(&new_row)
                            .map_or(true, |old_indentation| new_indentation != *old_indentation)
                        {
                            indent_columns.insert(new_row, new_indentation);
                        }
                    }
                    yield_now().await;
                }

                // Newly inserted rows always receive a suggestion, since they
                // had no pre-edit indentation to compare against.
                if let Some(inserted) = request.inserted.as_ref() {
                    let inserted_row_ranges = contiguous_ranges(
                        inserted
                            .iter()
                            .map(|range| range.to_point(&snapshot))
                            .flat_map(|range| range.start.row..range.end.row + 1),
                        max_rows_between_yields,
                    );
                    for inserted_row_range in inserted_row_ranges {
                        let suggestions = snapshot
                            .suggest_autoindents(inserted_row_range.clone())
                            .into_iter()
                            .flatten();
                        for (row, suggestion) in inserted_row_range.zip(suggestions) {
                            let delta = if suggestion.indent { INDENT_SIZE } else { 0 };
                            let new_indentation = indent_columns
                                .get(&suggestion.basis_row)
                                .copied()
                                .unwrap_or_else(|| {
                                    snapshot.indent_column_for_line(suggestion.basis_row)
                                })
                                + delta;
                            indent_columns.insert(row, new_indentation);
                        }
                        yield_now().await;
                    }
                }
            }
            indent_columns
        })
    }
1098
1099 fn apply_autoindents(
1100 &mut self,
1101 indent_columns: BTreeMap<u32, u32>,
1102 cx: &mut ModelContext<Self>,
1103 ) {
1104 self.start_transaction();
1105 for (row, indent_column) in &indent_columns {
1106 self.set_indent_column_for_line(*row, *indent_column, cx);
1107 }
1108 self.end_transaction(cx);
1109 }
1110
1111 fn set_indent_column_for_line(&mut self, row: u32, column: u32, cx: &mut ModelContext<Self>) {
1112 let current_column = self.indent_column_for_line(row);
1113 if column > current_column {
1114 let offset = Point::new(row, 0).to_offset(&*self);
1115 self.edit(
1116 [offset..offset],
1117 " ".repeat((column - current_column) as usize),
1118 cx,
1119 );
1120 } else if column < current_column {
1121 self.edit(
1122 [Point::new(row, 0)..Point::new(row, current_column - column)],
1123 "",
1124 cx,
1125 );
1126 }
1127 }
1128
1129 pub(crate) fn diff(&self, new_text: Arc<str>, cx: &AppContext) -> Task<Diff> {
1130 // TODO: it would be nice to not allocate here.
1131 let old_text = self.text();
1132 let base_version = self.version();
1133 cx.background().spawn(async move {
1134 let changes = TextDiff::from_lines(old_text.as_str(), new_text.as_ref())
1135 .iter_all_changes()
1136 .map(|c| (c.tag(), c.value().len()))
1137 .collect::<Vec<_>>();
1138 Diff {
1139 base_version,
1140 new_text,
1141 changes,
1142 }
1143 })
1144 }
1145
1146 pub(crate) fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> bool {
1147 if self.version == diff.base_version {
1148 self.start_transaction();
1149 let mut offset = 0;
1150 for (tag, len) in diff.changes {
1151 let range = offset..(offset + len);
1152 match tag {
1153 ChangeTag::Equal => offset += len,
1154 ChangeTag::Delete => self.edit(Some(range), "", cx),
1155 ChangeTag::Insert => {
1156 self.edit(Some(offset..offset), &diff.new_text[range], cx);
1157 offset += len;
1158 }
1159 }
1160 }
1161 self.end_transaction(cx);
1162 true
1163 } else {
1164 false
1165 }
1166 }
1167
1168 pub fn is_dirty(&self) -> bool {
1169 !self.saved_version.ge(&self.version)
1170 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1171 }
1172
1173 pub fn has_conflict(&self) -> bool {
1174 !self.saved_version.ge(&self.version)
1175 && self
1176 .file
1177 .as_ref()
1178 .map_or(false, |file| file.mtime() > self.saved_mtime)
1179 }
1180
    /// Subscribes to edit notifications from the underlying text buffer.
    pub fn subscribe(&mut self) -> Subscription {
        self.text.subscribe()
    }
1184
    /// Begins a transaction timestamped with the current instant.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }
1188
    /// Begins a transaction with an explicit timestamp; delegates to the
    /// underlying text buffer's transaction machinery.
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.text.start_transaction_at(now)
    }
1192
    /// Ends the current transaction, timestamped with the current instant.
    pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
        self.end_transaction_at(Instant::now(), cx)
    }
1196
1197 pub fn end_transaction_at(
1198 &mut self,
1199 now: Instant,
1200 cx: &mut ModelContext<Self>,
1201 ) -> Option<TransactionId> {
1202 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1203 let was_dirty = start_version != self.saved_version;
1204 self.did_edit(&start_version, was_dirty, cx);
1205 Some(transaction_id)
1206 } else {
1207 None
1208 }
1209 }
1210
1211 pub fn set_active_selections(
1212 &mut self,
1213 selections: Arc<[Selection<Anchor>]>,
1214 cx: &mut ModelContext<Self>,
1215 ) {
1216 let lamport_timestamp = self.text.lamport_clock.tick();
1217 self.remote_selections
1218 .insert(self.text.replica_id(), selections.clone());
1219 self.send_operation(
1220 Operation::UpdateSelections {
1221 replica_id: self.text.replica_id(),
1222 selections,
1223 lamport_timestamp,
1224 },
1225 cx,
1226 );
1227 }
1228
1229 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1230 let lamport_timestamp = self.text.lamport_clock.tick();
1231 self.send_operation(
1232 Operation::RemoveSelections {
1233 replica_id: self.text.replica_id(),
1234 lamport_timestamp,
1235 },
1236 cx,
1237 );
1238 }
1239
    /// Pushes the buffer's current contents to the attached language server,
    /// if any, as a new versioned snapshot.
    fn update_language_server(&mut self) {
        let language_server = if let Some(language_server) = self.language_server.as_mut() {
            language_server
        } else {
            return;
        };
        // NOTE(review): falls back to "/" when there is no backing file, and
        // panics if the file has no absolute path — presumably buffers with a
        // language server always have one; verify.
        let abs_path = self
            .file
            .as_ref()
            .map_or(Path::new("/").to_path_buf(), |file| {
                file.abs_path().unwrap()
            });

        let version = post_inc(&mut language_server.next_version);
        let snapshot = LanguageServerSnapshot {
            buffer_snapshot: self.text.snapshot(),
            version,
            path: Arc::from(abs_path),
        };
        // Remember the snapshot under its version so later LSP responses can
        // be interpreted against it, then hand it to the sending task.
        language_server
            .pending_snapshots
            .insert(version, snapshot.clone());
        let _ = language_server
            .latest_snapshot
            .blocking_send(Some(snapshot));
    }
1266
    /// Replaces each range in `ranges_iter` with `new_text`, without
    /// triggering automatic indentation.
    pub fn edit<I, S, T>(&mut self, ranges_iter: I, new_text: T, cx: &mut ModelContext<Self>)
    where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, false, cx)
    }
1275
    /// Like [`Buffer::edit`], but also requests automatic re-indentation of
    /// the affected rows.
    pub fn edit_with_autoindent<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        self.edit_internal(ranges_iter, new_text, true, cx)
    }
1288
    /// Core edit entry point: replaces `ranges_iter` with `new_text`,
    /// optionally recording an autoindent request for the affected rows.
    /// The resulting operation is broadcast via `send_operation`.
    pub fn edit_internal<I, S, T>(
        &mut self,
        ranges_iter: I,
        new_text: T,
        autoindent: bool,
        cx: &mut ModelContext<Self>,
    ) where
        I: IntoIterator<Item = Range<S>>,
        S: ToOffset,
        T: Into<String>,
    {
        let new_text = new_text.into();

        // Skip invalid ranges and coalesce contiguous ones.
        let mut ranges: Vec<Range<usize>> = Vec::new();
        for range in ranges_iter {
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            // An empty range with empty text is a no-op; drop it.
            if !new_text.is_empty() || !range.is_empty() {
                if let Some(prev_range) = ranges.last_mut() {
                    if prev_range.end >= range.start {
                        prev_range.end = cmp::max(prev_range.end, range.end);
                    } else {
                        ranges.push(range);
                    }
                } else {
                    ranges.push(range);
                }
            }
        }
        if ranges.is_empty() {
            return;
        }

        self.start_transaction();
        // Any in-flight autoindent computation is now stale.
        self.pending_autoindent.take();
        let autoindent_request = if autoindent && self.language.is_some() {
            let before_edit = self.snapshot();
            let edited = ranges
                .iter()
                .filter_map(|range| {
                    let start = range.start.to_point(self);
                    // When a newline is inserted at the end of a line, the
                    // start row itself keeps its indentation; only the
                    // inserted rows (handled below) need re-indenting.
                    if new_text.starts_with('\n') && start.column == self.line_len(start.row) {
                        None
                    } else {
                        Some(self.anchor_before(range.start))
                    }
                })
                .collect();
            Some((before_edit, edited))
        } else {
            None
        };

        let first_newline_ix = new_text.find('\n');
        let new_text_len = new_text.len();

        let edit = self.text.edit(ranges.iter().cloned(), new_text);

        if let Some((before_edit, edited)) = autoindent_request {
            let mut inserted = None;
            if let Some(first_newline_ix) = first_newline_ix {
                // For each edited range, compute the span of the newly
                // inserted text after its first newline, in post-edit
                // coordinates. `delta` tracks the offset shift contributed by
                // preceding edits.
                // NOTE(review): `delta` grows by removed_len + inserted_len,
                // while the net shift of later offsets is inserted_len −
                // removed_len — verify this is intentional for non-empty
                // replaced ranges.
                let mut delta = 0isize;
                inserted = Some(
                    ranges
                        .iter()
                        .map(|range| {
                            let start =
                                (delta + range.start as isize) as usize + first_newline_ix + 1;
                            let end = (delta + range.start as isize) as usize + new_text_len;
                            delta +=
                                (range.end as isize - range.start as isize) + new_text_len as isize;
                            self.anchor_before(start)..self.anchor_after(end)
                        })
                        .collect(),
                );
            }

            self.autoindent_requests.push(Arc::new(AutoindentRequest {
                before_edit,
                edited,
                inserted,
            }));
        }

        self.end_transaction(cx);
        self.send_operation(Operation::Buffer(text::Operation::Edit(edit)), cx);
    }
1376
1377 fn did_edit(
1378 &mut self,
1379 old_version: &clock::Global,
1380 was_dirty: bool,
1381 cx: &mut ModelContext<Self>,
1382 ) {
1383 if self.edits_since::<usize>(old_version).next().is_none() {
1384 return;
1385 }
1386
1387 self.reparse(cx);
1388 self.update_language_server();
1389
1390 cx.emit(Event::Edited);
1391 if !was_dirty {
1392 cx.emit(Event::Dirtied);
1393 }
1394 cx.notify();
1395 }
1396
    /// The tree-sitter grammar of the buffer's language, if both are present.
    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language.as_ref().and_then(|l| l.grammar.as_ref())
    }
1400
    /// Applies a batch of (possibly remote) operations. Text operations are
    /// applied in bulk by the underlying text buffer; other operations are
    /// applied immediately when their dependencies are resolvable, otherwise
    /// deferred until later operations unblock them.
    pub fn apply_ops<I: IntoIterator<Item = Operation>>(
        &mut self,
        ops: I,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Incoming edits invalidate any in-flight autoindent computation.
        self.pending_autoindent.take();
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        let mut deferred_ops = Vec::new();
        let buffer_ops = ops
            .into_iter()
            .filter_map(|op| match op {
                Operation::Buffer(op) => Some(op),
                _ => {
                    if self.can_apply_op(&op) {
                        self.apply_op(op, cx);
                    } else {
                        deferred_ops.push(op);
                    }
                    None
                }
            })
            .collect::<Vec<_>>();
        self.text.apply_ops(buffer_ops)?;
        // Applying text ops may have unblocked previously deferred ops.
        self.flush_deferred_ops(cx);
        self.did_edit(&old_version, was_dirty, cx);
        // Notify independently of whether the buffer was edited as the operations could include a
        // selection update.
        cx.notify();
        Ok(())
    }
1432
1433 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1434 let mut deferred_ops = Vec::new();
1435 for op in self.deferred_ops.drain().iter().cloned() {
1436 if self.can_apply_op(&op) {
1437 self.apply_op(op, cx);
1438 } else {
1439 deferred_ops.push(op);
1440 }
1441 }
1442 self.deferred_ops.insert(deferred_ops);
1443 }
1444
1445 fn can_apply_op(&self, operation: &Operation) -> bool {
1446 match operation {
1447 Operation::Buffer(_) => {
1448 unreachable!("buffer operations should never be applied at this layer")
1449 }
1450 Operation::UpdateDiagnostics { diagnostics, .. } => {
1451 diagnostics.iter().all(|diagnostic| {
1452 self.text.can_resolve(&diagnostic.range.start)
1453 && self.text.can_resolve(&diagnostic.range.end)
1454 })
1455 }
1456 Operation::UpdateSelections { selections, .. } => selections
1457 .iter()
1458 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1459 Operation::RemoveSelections { .. } => true,
1460 }
1461 }
1462
    /// Applies a single non-text operation, updating the lamport clock so
    /// future local operations are ordered after it.
    fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics { diagnostics, .. } => {
                self.apply_diagnostic_update(diagnostics, cx);
            }
            Operation::UpdateSelections {
                replica_id,
                selections,
                lamport_timestamp,
            } => {
                self.remote_selections.insert(replica_id, selections);
                self.text.lamport_clock.observe(lamport_timestamp);
            }
            Operation::RemoveSelections {
                replica_id,
                lamport_timestamp,
            } => {
                self.remote_selections.remove(&replica_id);
                self.text.lamport_clock.observe(lamport_timestamp);
            }
        }
    }
1488
    /// Replaces the buffer's diagnostic set with `diagnostics` (which must
    /// already be sorted) and bumps the update counter so observers refresh.
    fn apply_diagnostic_update(
        &mut self,
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        cx: &mut ModelContext<Self>,
    ) {
        self.diagnostics = DiagnosticSet::from_sorted_entries(diagnostics.iter().cloned(), self);
        self.diagnostics_update_count += 1;
        cx.notify();
    }
1498
1499 #[cfg(not(test))]
1500 pub fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1501 if let Some(file) = &self.file {
1502 file.buffer_updated(self.remote_id(), operation, cx.as_mut());
1503 }
1504 }
1505
    /// Test build: record operations so tests can assert on what was sent.
    #[cfg(test)]
    pub fn send_operation(&mut self, operation: Operation, _: &mut ModelContext<Self>) {
        self.operations.push(operation);
    }
1510
    /// Discards a departed collaborator's selections and notifies observers.
    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
        self.remote_selections.remove(&replica_id);
        cx.notify();
    }
1515
1516 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1517 let was_dirty = self.is_dirty();
1518 let old_version = self.version.clone();
1519
1520 if let Some((transaction_id, operation)) = self.text.undo() {
1521 self.send_operation(Operation::Buffer(operation), cx);
1522 self.did_edit(&old_version, was_dirty, cx);
1523 Some(transaction_id)
1524 } else {
1525 None
1526 }
1527 }
1528
1529 pub fn undo_transaction(
1530 &mut self,
1531 transaction_id: TransactionId,
1532 cx: &mut ModelContext<Self>,
1533 ) -> bool {
1534 let was_dirty = self.is_dirty();
1535 let old_version = self.version.clone();
1536
1537 if let Some(operation) = self.text.undo_transaction(transaction_id) {
1538 self.send_operation(Operation::Buffer(operation), cx);
1539 self.did_edit(&old_version, was_dirty, cx);
1540 true
1541 } else {
1542 false
1543 }
1544 }
1545
1546 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1547 let was_dirty = self.is_dirty();
1548 let old_version = self.version.clone();
1549
1550 if let Some((transaction_id, operation)) = self.text.redo() {
1551 self.send_operation(Operation::Buffer(operation), cx);
1552 self.did_edit(&old_version, was_dirty, cx);
1553 Some(transaction_id)
1554 } else {
1555 None
1556 }
1557 }
1558
1559 pub fn redo_transaction(
1560 &mut self,
1561 transaction_id: TransactionId,
1562 cx: &mut ModelContext<Self>,
1563 ) -> bool {
1564 let was_dirty = self.is_dirty();
1565 let old_version = self.version.clone();
1566
1567 if let Some(operation) = self.text.redo_transaction(transaction_id) {
1568 self.send_operation(Operation::Buffer(operation), cx);
1569 self.did_edit(&old_version, was_dirty, cx);
1570 true
1571 } else {
1572 false
1573 }
1574 }
1575}
1576
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Test helper: performs a random edit (replacing up to `old_range_count`
    /// ranges) inside a single transaction.
    pub fn randomly_edit<T>(
        &mut self,
        rng: &mut T,
        old_range_count: usize,
        cx: &mut ModelContext<Self>,
    ) where
        T: rand::Rng,
    {
        self.start_transaction();
        self.text.randomly_edit(rng, old_range_count);
        self.end_transaction(cx);
    }
}
1592
/// Buffers are GPUI model entities. On release, the backing file (if any) is
/// told the buffer went away so it can clean up.
impl Entity for Buffer {
    type Event = Event;

    fn release(&mut self, cx: &mut gpui::MutableAppContext) {
        if let Some(file) = self.file.as_ref() {
            file.buffer_removed(self.remote_id(), cx);
        }
    }
}
1602
/// `Buffer` dereferences to its underlying `TextBuffer`, exposing the
/// text-level read API directly.
impl Deref for Buffer {
    type Target = TextBuffer;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
1610
impl BufferSnapshot {
    /// Computes an indentation suggestion for each row in `row_range`, driven
    /// by the language's tree-sitter indents query (`@indent` / `@end`
    /// captures). Returns `None` when the buffer has no grammar or no parse
    /// tree.
    fn suggest_autoindents<'a>(
        &'a self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = IndentSuggestion> + 'a> {
        let mut query_cursor = QueryCursorHandle::new();
        if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
            let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

            // Get the "indentation ranges" that intersect this row range.
            let indent_capture_ix = grammar.indents_query.capture_index_for_name("indent");
            let end_capture_ix = grammar.indents_query.capture_index_for_name("end");
            query_cursor.set_point_range(
                Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0).to_ts_point()
                    ..Point::new(row_range.end, 0).to_ts_point(),
            );
            let mut indentation_ranges = Vec::<(Range<Point>, &'static str)>::new();
            for mat in query_cursor.matches(
                &grammar.indents_query,
                tree.root_node(),
                TextProvider(self.as_rope()),
            ) {
                // Each match contributes a range spanning its @indent capture,
                // truncated at an @end capture when one is present.
                let mut node_kind = "";
                let mut start: Option<Point> = None;
                let mut end: Option<Point> = None;
                for capture in mat.captures {
                    if Some(capture.index) == indent_capture_ix {
                        node_kind = capture.node.kind();
                        start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                        end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                    } else if Some(capture.index) == end_capture_ix {
                        end = Some(Point::from_ts_point(capture.node.start_position().into()));
                    }
                }

                if let Some((start, end)) = start.zip(end) {
                    // Ranges confined to one row can't affect later rows.
                    if start.row == end.row {
                        continue;
                    }

                    // Keep `indentation_ranges` sorted by start point, merging
                    // ranges that begin at the same position.
                    let range = start..end;
                    match indentation_ranges.binary_search_by_key(&range.start, |r| r.0.start) {
                        Err(ix) => indentation_ranges.insert(ix, (range, node_kind)),
                        Ok(ix) => {
                            let prev_range = &mut indentation_ranges[ix];
                            prev_range.0.end = prev_range.0.end.max(range.end);
                        }
                    }
                }
            }

            let mut prev_row = prev_non_blank_row.unwrap_or(0);
            Some(row_range.map(move |row| {
                let row_start = Point::new(row, self.indent_column_for_line(row));

                // Indent relative to the previous row when a range opens on
                // that row and is still unclosed here; outdent to a range's
                // start row when the range ends at or before this row's start.
                let mut indent_from_prev_row = false;
                let mut outdent_to_row = u32::MAX;
                for (range, _node_kind) in &indentation_ranges {
                    if range.start.row >= row {
                        break;
                    }

                    if range.start.row == prev_row && range.end > row_start {
                        indent_from_prev_row = true;
                    }
                    if range.end.row >= prev_row && range.end <= row_start {
                        outdent_to_row = outdent_to_row.min(range.start.row);
                    }
                }

                let suggestion = if outdent_to_row == prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                } else if indent_from_prev_row {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: true,
                    }
                } else if outdent_to_row < prev_row {
                    IndentSuggestion {
                        basis_row: outdent_to_row,
                        indent: false,
                    }
                } else {
                    IndentSuggestion {
                        basis_row: prev_row,
                        indent: false,
                    }
                };

                prev_row = row;
                suggestion
            }))
        } else {
            None
        }
    }

    /// Scans upward from `row` for the nearest row that contains any
    /// non-whitespace character.
    fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
        while row > 0 {
            row -= 1;
            if !self.is_line_blank(row) {
                return Some(row);
            }
        }
        None
    }

    /// Returns an iterator over the text in `range`, split into chunks of
    /// uniform highlighting and diagnostic severity. Highlighting and
    /// diagnostic annotations are only produced when a `theme` is given.
    pub fn chunks<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
        theme: Option<&'a SyntaxTheme>,
    ) -> BufferChunks<'a> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);

        let mut highlights = None;
        let mut diagnostic_endpoints = Vec::<DiagnosticEndpoint>::new();
        if let Some(theme) = theme {
            // Collect start/end markers for every diagnostic overlapping the
            // range, sorted so that starts precede ends at equal offsets.
            for entry in self
                .diagnostics
                .range::<_, usize>(range.clone(), self, true)
            {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                });
            }
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));

            if let Some((grammar, tree)) = self.grammar().zip(self.tree.as_ref()) {
                let mut query_cursor = QueryCursorHandle::new();

                // TODO - add a Tree-sitter API to remove the need for this.
                // SAFETY(review): the borrow is extended to 'static so the
                // captures iterator can be stored alongside its cursor inside
                // `BufferChunkHighlights`; presumably sound only because the
                // handle lives in the same struct — verify.
                let cursor = unsafe {
                    std::mem::transmute::<_, &'static mut QueryCursor>(query_cursor.deref_mut())
                };
                let captures = cursor.set_byte_range(range.clone()).captures(
                    &grammar.highlights_query,
                    tree.root_node(),
                    TextProvider(self.text.as_rope()),
                );
                highlights = Some(BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_map: grammar.highlight_map(),
                    _query_cursor: query_cursor,
                    theme,
                })
            }
        }

        let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
        let chunks = self.text.as_rope().chunks_in_range(range.clone());

        BufferChunks {
            range,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            highlights,
        }
    }

    /// The buffer's language, if one is assigned.
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    /// The language's tree-sitter grammar, if both are present.
    fn grammar(&self) -> Option<&Arc<Grammar>> {
        self.language
            .as_ref()
            .and_then(|language| language.grammar.as_ref())
    }

    /// The byte range of the smallest syntax node that contains `range` and
    /// is strictly larger than it — useful for expanding a selection outward.
    pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
        if let Some(tree) = self.tree.as_ref() {
            let root = tree.root_node();
            let range = range.start.to_offset(self)..range.end.to_offset(self);
            let mut node = root.descendant_for_byte_range(range.start, range.end);
            // Walk up past nodes whose range equals `range` exactly, so that
            // repeated calls keep expanding.
            while node.map_or(false, |n| n.byte_range() == range) {
                node = node.unwrap().parent();
            }
            node.map(|n| n.byte_range())
        } else {
            None
        }
    }

    /// The byte ranges of the innermost bracket pair (per the language's
    /// brackets query) that encloses `range`.
    pub fn enclosing_bracket_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> Option<(Range<usize>, Range<usize>)> {
        let (grammar, tree) = self.grammar().zip(self.tree.as_ref())?;
        let open_capture_ix = grammar.brackets_query.capture_index_for_name("open")?;
        let close_capture_ix = grammar.brackets_query.capture_index_for_name("close")?;

        // Find bracket pairs that *inclusively* contain the given range.
        let range = range.start.to_offset(self).saturating_sub(1)..range.end.to_offset(self) + 1;
        let mut cursor = QueryCursorHandle::new();
        let matches = cursor.set_byte_range(range).matches(
            &grammar.brackets_query,
            tree.root_node(),
            TextProvider(self.as_rope()),
        );

        // Get the ranges of the innermost pair of brackets.
        matches
            .filter_map(|mat| {
                let open = mat.nodes_for_capture_index(open_capture_ix).next()?;
                let close = mat.nodes_for_capture_index(close_capture_ix).next()?;
                Some((open.byte_range(), close.byte_range()))
            })
            .min_by_key(|(open_range, close_range)| close_range.end - open_range.start)
    }

    /// Iterates over other replicas' selection sets, yielding for each replica
    /// the selections that intersect `range`. The local replica is excluded.
    pub fn remote_selections_in_range<'a>(
        &'a self,
        range: Range<Anchor>,
    ) -> impl 'a + Iterator<Item = (ReplicaId, impl 'a + Iterator<Item = &'a Selection<Anchor>>)>
    {
        self.remote_selections
            .iter()
            .filter(|(replica_id, _)| **replica_id != self.text.replica_id())
            .map(move |(replica_id, selections)| {
                // Selections are sorted, so binary-search for the sub-slice
                // overlapping `range`.
                let start_ix = match selections
                    .binary_search_by(|probe| probe.end.cmp(&range.start, self).unwrap())
                {
                    Ok(ix) | Err(ix) => ix,
                };
                let end_ix = match selections
                    .binary_search_by(|probe| probe.start.cmp(&range.end, self).unwrap())
                {
                    Ok(ix) | Err(ix) => ix,
                };

                (*replica_id, selections[start_ix..end_ix].iter())
            })
    }

    /// All diagnostics in the buffer, resolved to positions of type `O`.
    pub fn all_diagnostics<'a, O>(&'a self) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics
            .iter()
            .map(|diagnostic| diagnostic.resolve(self))
    }

    /// The diagnostics intersecting `search_range`, resolved to positions of
    /// type `O`.
    pub fn diagnostics_in_range<'a, T, O>(
        &'a self,
        search_range: Range<T>,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        T: 'a + ToOffset,
        O: 'a + FromAnchor,
    {
        self.diagnostics.range(search_range, self, true)
    }

    /// All diagnostics grouped by their group id (e.g. a primary diagnostic
    /// with its related information).
    pub fn diagnostic_groups<O>(&self) -> Vec<DiagnosticGroup<O>>
    where
        O: FromAnchor + Ord + Copy,
    {
        self.diagnostics.groups(self)
    }

    /// The diagnostics belonging to a single group.
    pub fn diagnostic_group<'a, O>(
        &'a self,
        group_id: usize,
    ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
    where
        O: 'a + FromAnchor,
    {
        self.diagnostics.group(group_id, self)
    }

    /// Monotonic counter incremented on every diagnostics update; lets
    /// observers detect staleness cheaply.
    pub fn diagnostics_update_count(&self) -> usize {
        self.diagnostics_update_count
    }

    /// Monotonic counter incremented on every reparse.
    pub fn parse_count(&self) -> usize {
        self.parse_count
    }
}
1907
1908impl Clone for BufferSnapshot {
1909 fn clone(&self) -> Self {
1910 Self {
1911 text: self.text.clone(),
1912 tree: self.tree.clone(),
1913 remote_selections: self.remote_selections.clone(),
1914 diagnostics: self.diagnostics.clone(),
1915 diagnostics_update_count: self.diagnostics_update_count,
1916 is_parsing: self.is_parsing,
1917 language: self.language.clone(),
1918 parse_count: self.parse_count,
1919 }
1920 }
1921}
1922
/// `BufferSnapshot` dereferences to the underlying text snapshot, exposing
/// its read API directly.
impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
1930
/// Adapts the buffer's rope to tree-sitter's text-callback interface by
/// yielding the byte chunks that overlap a given node.
impl<'a> tree_sitter::TextProvider<'a> for TextProvider<'a> {
    type I = ByteChunks<'a>;

    fn text(&mut self, node: tree_sitter::Node) -> Self::I {
        ByteChunks(self.0.chunks_in_range(node.byte_range()))
    }
}
1938
// Iterator adapter converting rope `&str` chunks into `&[u8]` for tree-sitter.
struct ByteChunks<'a>(rope::Chunks<'a>);
1940
1941impl<'a> Iterator for ByteChunks<'a> {
1942 type Item = &'a [u8];
1943
1944 fn next(&mut self) -> Option<Self::Item> {
1945 self.0.next().map(str::as_bytes)
1946 }
1947}
1948
// SAFETY(review): `BufferChunks` borrows rope chunks and tree-sitter query
// state; this asserts none of that is thread-affine. Presumably sound because
// the borrowed data is immutable for the iterator's lifetime, but verify
// against tree-sitter's `QueryCaptures` thread-safety guarantees.
unsafe impl<'a> Send for BufferChunks<'a> {}
1950
impl<'a> BufferChunks<'a> {
    /// Repositions the iterator at `offset`, discarding highlight state that
    /// no longer applies at the new position.
    pub fn seek(&mut self, offset: usize) {
        self.range.start = offset;
        self.chunks.seek(self.range.start);
        if let Some(highlights) = self.highlights.as_mut() {
            // Drop highlight scopes that end before the new position.
            highlights
                .stack
                .retain(|(end_offset, _)| *end_offset > offset);
            if let Some((mat, capture_ix)) = &highlights.next_capture {
                let capture = mat.captures[*capture_ix as usize];
                if offset >= capture.node.start_byte() {
                    // The pending capture already started: put it on the stack
                    // if it still covers the new position, and clear it so the
                    // next capture is pulled from the re-ranged query below.
                    let next_capture_end = capture.node.end_byte();
                    if offset < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_map.get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            }
            highlights.captures.set_byte_range(self.range.clone());
        }
    }

    /// The iterator's current byte offset within the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    /// Adjusts the per-severity nesting depth when crossing a diagnostic
    /// start (`is_start`) or end marker.
    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            // Other severities are not tracked.
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }
    }

    /// The most severe diagnostic level active at the current position, if any.
    fn current_diagnostic_severity(&mut self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }
}
2009
/// Walks the rope's chunks, splitting each one at the next highlight-capture
/// boundary and the next diagnostic endpoint so that every yielded `Chunk`
/// has a single highlight style and diagnostic severity.
impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Pop highlight scopes that ended at or before the current offset.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Push every capture that has already started onto the stack;
            // stop at the first capture that begins after the current offset.
            while let Some((mat, capture_ix)) = highlights.next_capture.as_ref() {
                let capture = mat.captures[*capture_ix as usize];
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id = highlights.highlight_map.get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Consume diagnostic endpoints up to the current offset, remembering
        // where the next one begins so the chunk can be cut there.
        while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
            if endpoint.offset <= self.range.start {
                self.update_diagnostic_depths(endpoint);
                self.diagnostic_endpoints.next();
            } else {
                next_diagnostic_endpoint = endpoint.offset;
                break;
            }
        }

        if let Some(chunk) = self.chunks.peek() {
            let chunk_start = self.range.start;
            // Cut the chunk at whichever boundary comes first.
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_style = None;
            if let Some(highlights) = self.highlights.as_ref() {
                if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
                    chunk_end = chunk_end.min(*parent_capture_end);
                    highlight_style = parent_highlight_id.style(highlights.theme);
                }
            }

            let slice =
                &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
            self.range.start = chunk_end;
            // Advance to the next rope chunk once this one is fully consumed.
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                highlight_style,
                diagnostic: self.current_diagnostic_severity(),
            })
        } else {
            None
        }
    }
}
2085
2086impl QueryCursorHandle {
2087 fn new() -> Self {
2088 QueryCursorHandle(Some(
2089 QUERY_CURSORS
2090 .lock()
2091 .pop()
2092 .unwrap_or_else(|| QueryCursor::new()),
2093 ))
2094 }
2095}
2096
impl Deref for QueryCursorHandle {
    type Target = QueryCursor;

    fn deref(&self) -> &Self::Target {
        // The inner Option is only `None` during `drop`, so this cannot panic.
        self.0.as_ref().unwrap()
    }
}
2104
impl DerefMut for QueryCursorHandle {
    fn deref_mut(&mut self) -> &mut Self::Target {
        // The inner Option is only `None` during `drop`, so this cannot panic.
        self.0.as_mut().unwrap()
    }
}
2110
impl Drop for QueryCursorHandle {
    /// Resets the cursor's byte and point restrictions to their maximal
    /// defaults and returns it to the global pool for reuse.
    fn drop(&mut self) {
        let mut cursor = self.0.take().unwrap();
        cursor.set_byte_range(0..usize::MAX);
        cursor.set_point_range(Point::zero().to_ts_point()..Point::MAX.to_ts_point());
        QUERY_CURSORS.lock().push(cursor)
    }
}
2119
/// Conversions between the buffer's `Point` (u32 row/column) and
/// tree-sitter's `Point` (usize row/column).
trait ToTreeSitterPoint {
    fn to_ts_point(self) -> tree_sitter::Point;
    fn from_ts_point(point: tree_sitter::Point) -> Self;
}
2124
2125impl ToTreeSitterPoint for Point {
2126 fn to_ts_point(self) -> tree_sitter::Point {
2127 tree_sitter::Point::new(self.row as usize, self.column as usize)
2128 }
2129
2130 fn from_ts_point(point: tree_sitter::Point) -> Self {
2131 Point::new(point.row as u32, point.column as u32)
2132 }
2133}
2134
/// Lets non-text operations be deferred in an `OperationQueue`, ordered by
/// their lamport timestamps.
impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            // Text operations are handled by the text buffer, never deferred here.
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::RemoveSelections {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}
2153
/// Manual `Default` so that new diagnostics start out as valid errors.
impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            code: Default::default(),
            // Diagnostics are errors unless a severity is explicitly set.
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            group_id: Default::default(),
            is_primary: Default::default(),
            // New diagnostics are valid until explicitly invalidated.
            is_valid: true,
        }
    }
}
2167
/// Groups an ascending sequence of row numbers into contiguous ranges,
/// splitting any run that would exceed `max_len` rows.
///
/// E.g. `[1, 2, 3, 5, 6]` yields `1..4` and `5..7`.
///
/// Accepts anything iterable over `u32` (generalized from the previous
/// `impl Iterator` bound — every iterator still satisfies it, so existing
/// callers are unaffected, and the `into_iter()` below is now meaningful
/// rather than a redundant no-op).
pub fn contiguous_ranges(
    values: impl IntoIterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values.into_iter();
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || loop {
        if let Some(value) = values.next() {
            if let Some(range) = &mut current_range {
                // Grow the open range while values stay contiguous and the
                // range hasn't reached the length cap.
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            // Start a new range, emitting the one just closed (if any).
            let prev_range = current_range.clone();
            current_range = Some(value..(value + 1));
            if prev_range.is_some() {
                return prev_range;
            }
        } else {
            // Input exhausted: flush the final open range.
            return current_range.take();
        }
    })
}