1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 markdown::RenderedMarkdown,
5 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, PLAIN_TEXT,
6};
7use crate::{
8 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
9 language_settings::{language_settings, LanguageSettings},
10 outline::OutlineItem,
11 syntax_map::{
12 SyntaxLayerInfo, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
13 SyntaxSnapshot, ToTreeSitterPoint,
14 },
15 CodeLabel, LanguageScope, Outline,
16};
17use anyhow::{anyhow, Result};
18pub use clock::ReplicaId;
19use futures::FutureExt as _;
20use gpui::{fonts::HighlightStyle, AppContext, Entity, ModelContext, Task};
21use lsp::LanguageServerId;
22use parking_lot::Mutex;
23use similar::{ChangeTag, TextDiff};
24use smallvec::SmallVec;
25use smol::future::yield_now;
26use std::{
27 any::Any,
28 cmp::{self, Ordering},
29 collections::BTreeMap,
30 ffi::OsStr,
31 future::Future,
32 iter::{self, Iterator, Peekable},
33 mem,
34 ops::{Deref, Range},
35 path::{Path, PathBuf},
36 str,
37 sync::Arc,
38 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
39 vec,
40};
41use sum_tree::TreeMap;
42use text::operation_queue::OperationQueue;
43pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, *};
44use theme::SyntaxTheme;
45#[cfg(any(test, feature = "test-support"))]
46use util::RandomCharIter;
47use util::{RangeExt, TryFutureExt as _};
48
49#[cfg(any(test, feature = "test-support"))]
50pub use {tree_sitter_rust, tree_sitter_typescript};
51
52pub use lsp::DiagnosticSeverity;
53
54pub struct Buffer {
55 text: TextBuffer,
56 diff_base: Option<String>,
57 git_diff: git::diff::BufferDiff,
58 file: Option<Arc<dyn File>>,
59 saved_version: clock::Global,
60 saved_version_fingerprint: RopeFingerprint,
61 saved_mtime: SystemTime,
62 transaction_depth: usize,
63 was_dirty_before_starting_transaction: Option<bool>,
64 language: Option<Arc<Language>>,
65 autoindent_requests: Vec<Arc<AutoindentRequest>>,
66 pending_autoindent: Option<Task<()>>,
67 sync_parse_timeout: Duration,
68 syntax_map: Mutex<SyntaxMap>,
69 parsing_in_background: bool,
70 parse_count: usize,
71 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
72 remote_selections: TreeMap<ReplicaId, SelectionSet>,
73 selections_update_count: usize,
74 diagnostics_update_count: usize,
75 diagnostics_timestamp: clock::Lamport,
76 file_update_count: usize,
77 git_diff_update_count: usize,
78 completion_triggers: Vec<String>,
79 completion_triggers_timestamp: clock::Lamport,
80 deferred_ops: OperationQueue<Operation>,
81}
82
83pub struct BufferSnapshot {
84 text: text::BufferSnapshot,
85 pub git_diff: git::diff::BufferDiff,
86 pub(crate) syntax: SyntaxSnapshot,
87 file: Option<Arc<dyn File>>,
88 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
89 diagnostics_update_count: usize,
90 file_update_count: usize,
91 git_diff_update_count: usize,
92 remote_selections: TreeMap<ReplicaId, SelectionSet>,
93 selections_update_count: usize,
94 language: Option<Arc<Language>>,
95 parse_count: usize,
96}
97
98#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
99pub struct IndentSize {
100 pub len: u32,
101 pub kind: IndentKind,
102}
103
104#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
105pub enum IndentKind {
106 #[default]
107 Space,
108 Tab,
109}
110
111#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
112pub enum CursorShape {
113 #[default]
114 Bar,
115 Block,
116 Underscore,
117 Hollow,
118}
119
120#[derive(Clone, Debug)]
121struct SelectionSet {
122 line_mode: bool,
123 cursor_shape: CursorShape,
124 selections: Arc<[Selection<Anchor>]>,
125 lamport_timestamp: clock::Lamport,
126}
127
128#[derive(Clone, Debug, PartialEq, Eq)]
129pub struct GroupId {
130 source: Arc<str>,
131 id: usize,
132}
133
134#[derive(Clone, Debug, PartialEq, Eq)]
135pub struct Diagnostic {
136 pub source: Option<String>,
137 pub code: Option<String>,
138 pub severity: DiagnosticSeverity,
139 pub message: String,
140 pub group_id: usize,
141 pub is_valid: bool,
142 pub is_primary: bool,
143 pub is_disk_based: bool,
144 pub is_unnecessary: bool,
145}
146
147#[derive(Clone, Debug)]
148pub struct Completion {
149 pub old_range: Range<Anchor>,
150 pub new_text: String,
151 pub label: CodeLabel,
152 pub alongside_documentation: Option<RenderedMarkdown>,
153 pub server_id: LanguageServerId,
154 pub lsp_completion: lsp::CompletionItem,
155}
156
157#[derive(Clone, Debug)]
158pub struct CodeAction {
159 pub server_id: LanguageServerId,
160 pub range: Range<Anchor>,
161 pub lsp_action: lsp::CodeAction,
162}
163
164#[derive(Clone, Debug, PartialEq, Eq)]
165pub enum Operation {
166 Buffer(text::Operation),
167
168 UpdateDiagnostics {
169 server_id: LanguageServerId,
170 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
171 lamport_timestamp: clock::Lamport,
172 },
173
174 UpdateSelections {
175 selections: Arc<[Selection<Anchor>]>,
176 lamport_timestamp: clock::Lamport,
177 line_mode: bool,
178 cursor_shape: CursorShape,
179 },
180
181 UpdateCompletionTriggers {
182 triggers: Vec<String>,
183 lamport_timestamp: clock::Lamport,
184 },
185}
186
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub enum Event {
189 Operation(Operation),
190 Edited,
191 DirtyChanged,
192 Saved,
193 FileHandleChanged,
194 Reloaded,
195 DiffBaseChanged,
196 LanguageChanged,
197 Reparsed,
198 DiagnosticsUpdated,
199 Closed,
200}
201
202pub trait File: Send + Sync {
203 fn as_local(&self) -> Option<&dyn LocalFile>;
204
205 fn is_local(&self) -> bool {
206 self.as_local().is_some()
207 }
208
209 fn mtime(&self) -> SystemTime;
210
211 /// Returns the path of this file relative to the worktree's root directory.
212 fn path(&self) -> &Arc<Path>;
213
214 /// Returns the path of this file relative to the worktree's parent directory (this means it
215 /// includes the name of the worktree's root folder).
216 fn full_path(&self, cx: &AppContext) -> PathBuf;
217
218 /// Returns the last component of this handle's absolute path. If this handle refers to the root
219 /// of its worktree, then this method will return the name of the worktree itself.
220 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
221
222 /// Returns the id of the worktree to which this file belongs.
223 ///
224 /// This is needed for looking up project-specific settings.
225 fn worktree_id(&self) -> usize;
226
227 fn is_deleted(&self) -> bool;
228
229 fn as_any(&self) -> &dyn Any;
230
231 fn to_proto(&self) -> rpc::proto::File;
232}
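
// Illustrative example of the path accessors above (hypothetical paths, not
// from the original source). For a worktree rooted at `/home/alice/projects/zed`
// that contains `src/main.rs`:
//
// - `path()` => `src/main.rs`
// - `full_path()` => `zed/src/main.rs` (includes the worktree root folder's name)
// - `file_name()` => `main.rs`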
233
234pub trait LocalFile: File {
235 /// Returns the absolute path of this file.
236 fn abs_path(&self, cx: &AppContext) -> PathBuf;
237
238 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
239
240 fn buffer_reloaded(
241 &self,
242 buffer_id: u64,
243 version: &clock::Global,
244 fingerprint: RopeFingerprint,
245 line_ending: LineEnding,
246 mtime: SystemTime,
247 cx: &mut AppContext,
248 );
249}
250
251#[derive(Clone, Debug)]
252pub enum AutoindentMode {
253 /// Indent each line of inserted text.
254 EachLine,
255 /// Apply the same indentation adjustment to all of the lines
256 /// in a given insertion.
257 Block {
258 /// The original indentation level of the first line of each
259 /// insertion, if the inserted text was copied from elsewhere.
260 original_indent_columns: Vec<u32>,
261 },
262}
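
// Illustrative sketch (not from the original source; `buffer`, `insert_at`, and
// `cx` are assumed to be in scope): pasting a two-line block that was originally
// indented by four columns. `Block` mode re-indents the whole insertion by the
// same amount as its first line, using the recorded original indent column as
// the baseline.
//
//     buffer.edit(
//         [(insert_at..insert_at, "    foo();\n    bar();\n")],
//         Some(AutoindentMode::Block {
//             original_indent_columns: vec![4],
//         }),
//         cx,
//     );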
263
264#[derive(Clone)]
265struct AutoindentRequest {
266 before_edit: BufferSnapshot,
267 entries: Vec<AutoindentRequestEntry>,
268 is_block_mode: bool,
269}
270
271#[derive(Clone)]
272struct AutoindentRequestEntry {
273 /// A range of the buffer whose indentation should be adjusted.
274 range: Range<Anchor>,
275 /// Whether or not these lines should be considered brand new, for the
276 /// purpose of auto-indent. When text is not new, its indentation will
277 /// only be adjusted if the suggested indentation level has *changed*
278 /// since the edit was made.
279 first_line_is_new: bool,
280 indent_size: IndentSize,
281 original_indent_column: Option<u32>,
282}
283
284#[derive(Debug)]
285struct IndentSuggestion {
286 basis_row: u32,
287 delta: Ordering,
288 within_error: bool,
289}
290
291struct BufferChunkHighlights<'a> {
292 captures: SyntaxMapCaptures<'a>,
293 next_capture: Option<SyntaxMapCapture<'a>>,
294 stack: Vec<(usize, HighlightId)>,
295 highlight_maps: Vec<HighlightMap>,
296}
297
298pub struct BufferChunks<'a> {
299 range: Range<usize>,
300 chunks: text::Chunks<'a>,
301 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
302 error_depth: usize,
303 warning_depth: usize,
304 information_depth: usize,
305 hint_depth: usize,
306 unnecessary_depth: usize,
307 highlights: Option<BufferChunkHighlights<'a>>,
308}
309
310#[derive(Clone, Copy, Debug, Default)]
311pub struct Chunk<'a> {
312 pub text: &'a str,
313 pub syntax_highlight_id: Option<HighlightId>,
314 pub highlight_style: Option<HighlightStyle>,
315 pub diagnostic_severity: Option<DiagnosticSeverity>,
316 pub is_unnecessary: bool,
317 pub is_tab: bool,
318}
319
320pub struct Diff {
321 pub(crate) base_version: clock::Global,
322 line_ending: LineEnding,
323 edits: Vec<(Range<usize>, Arc<str>)>,
324}
325
326#[derive(Clone, Copy)]
327pub(crate) struct DiagnosticEndpoint {
328 offset: usize,
329 is_start: bool,
330 severity: DiagnosticSeverity,
331 is_unnecessary: bool,
332}
333
334#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
335pub enum CharKind {
336 Punctuation,
337 Whitespace,
338 Word,
339}
340
341impl CharKind {
342 pub fn coerce_punctuation(self, treat_punctuation_as_word: bool) -> Self {
343 if treat_punctuation_as_word && self == CharKind::Punctuation {
344 CharKind::Word
345 } else {
346 self
347 }
348 }
349}
350
351impl Buffer {
352 pub fn new<T: Into<String>>(replica_id: ReplicaId, id: u64, base_text: T) -> Self {
353 Self::build(
354 TextBuffer::new(replica_id, id, base_text.into()),
355 None,
356 None,
357 )
358 }
359
360 pub fn remote(remote_id: u64, replica_id: ReplicaId, base_text: String) -> Self {
361 Self::build(
362 TextBuffer::new(replica_id, remote_id, base_text),
363 None,
364 None,
365 )
366 }
367
368 pub fn from_proto(
369 replica_id: ReplicaId,
370 message: proto::BufferState,
371 file: Option<Arc<dyn File>>,
372 ) -> Result<Self> {
373 let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
374 let mut this = Self::build(
375 buffer,
376 message.diff_base.map(|text| text.into_boxed_str().into()),
377 file,
378 );
379 this.text.set_line_ending(proto::deserialize_line_ending(
380 rpc::proto::LineEnding::from_i32(message.line_ending)
381 .ok_or_else(|| anyhow!("missing line_ending"))?,
382 ));
383 this.saved_version = proto::deserialize_version(&message.saved_version);
384 this.saved_version_fingerprint =
385 proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
386 this.saved_mtime = message
387 .saved_mtime
388 .ok_or_else(|| anyhow!("invalid saved_mtime"))?
389 .into();
390 Ok(this)
391 }
392
393 pub fn to_proto(&self) -> proto::BufferState {
394 proto::BufferState {
395 id: self.remote_id(),
396 file: self.file.as_ref().map(|f| f.to_proto()),
397 base_text: self.base_text().to_string(),
398 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
399 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
400 saved_version: proto::serialize_version(&self.saved_version),
401 saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
402 saved_mtime: Some(self.saved_mtime.into()),
403 }
404 }
405
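/// Collects the operations needed to bring another replica up to date:
/// currently deferred operations, each replica's selections, the diagnostics
/// for each language server, the completion triggers, and any text operations
/// that the given `since` version has not yet observed.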
406 pub fn serialize_ops(
407 &self,
408 since: Option<clock::Global>,
409 cx: &AppContext,
410 ) -> Task<Vec<proto::Operation>> {
411 let mut operations = Vec::new();
412 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
413
414 operations.extend(self.remote_selections.iter().map(|(_, set)| {
415 proto::serialize_operation(&Operation::UpdateSelections {
416 selections: set.selections.clone(),
417 lamport_timestamp: set.lamport_timestamp,
418 line_mode: set.line_mode,
419 cursor_shape: set.cursor_shape,
420 })
421 }));
422
423 for (server_id, diagnostics) in &self.diagnostics {
424 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
425 lamport_timestamp: self.diagnostics_timestamp,
426 server_id: *server_id,
427 diagnostics: diagnostics.iter().cloned().collect(),
428 }));
429 }
430
431 operations.push(proto::serialize_operation(
432 &Operation::UpdateCompletionTriggers {
433 triggers: self.completion_triggers.clone(),
434 lamport_timestamp: self.completion_triggers_timestamp,
435 },
436 ));
437
438 let text_operations = self.text.operations().clone();
439 cx.background().spawn(async move {
440 let since = since.unwrap_or_default();
441 operations.extend(
442 text_operations
443 .iter()
444 .filter(|(_, op)| !since.observed(op.timestamp()))
445 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
446 );
447 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
448 operations
449 })
450 }
451
452 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
453 self.set_language(Some(language), cx);
454 self
455 }
456
457 pub fn build(
458 buffer: TextBuffer,
459 diff_base: Option<String>,
460 file: Option<Arc<dyn File>>,
461 ) -> Self {
462 let saved_mtime = if let Some(file) = file.as_ref() {
463 file.mtime()
464 } else {
465 UNIX_EPOCH
466 };
467
468 Self {
469 saved_mtime,
470 saved_version: buffer.version(),
471 saved_version_fingerprint: buffer.as_rope().fingerprint(),
472 transaction_depth: 0,
473 was_dirty_before_starting_transaction: None,
474 text: buffer,
475 diff_base,
476 git_diff: git::diff::BufferDiff::new(),
477 file,
478 syntax_map: Mutex::new(SyntaxMap::new()),
479 parsing_in_background: false,
480 parse_count: 0,
481 sync_parse_timeout: Duration::from_millis(1),
482 autoindent_requests: Default::default(),
483 pending_autoindent: Default::default(),
484 language: None,
485 remote_selections: Default::default(),
486 selections_update_count: 0,
487 diagnostics: Default::default(),
488 diagnostics_update_count: 0,
489 diagnostics_timestamp: Default::default(),
490 file_update_count: 0,
491 git_diff_update_count: 0,
492 completion_triggers: Default::default(),
493 completion_triggers_timestamp: Default::default(),
494 deferred_ops: OperationQueue::new(),
495 }
496 }
497
498 pub fn snapshot(&self) -> BufferSnapshot {
499 let text = self.text.snapshot();
500 let mut syntax_map = self.syntax_map.lock();
501 syntax_map.interpolate(&text);
502 let syntax = syntax_map.snapshot();
503
504 BufferSnapshot {
505 text,
506 syntax,
507 git_diff: self.git_diff.clone(),
508 file: self.file.clone(),
509 remote_selections: self.remote_selections.clone(),
510 diagnostics: self.diagnostics.clone(),
511 diagnostics_update_count: self.diagnostics_update_count,
512 file_update_count: self.file_update_count,
513 git_diff_update_count: self.git_diff_update_count,
514 language: self.language.clone(),
515 parse_count: self.parse_count,
516 selections_update_count: self.selections_update_count,
517 }
518 }
519
520 pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
521 &self.text
522 }
523
524 pub fn text_snapshot(&self) -> text::BufferSnapshot {
525 self.text.snapshot()
526 }
527
528 pub fn file(&self) -> Option<&Arc<dyn File>> {
529 self.file.as_ref()
530 }
531
532 pub fn saved_version(&self) -> &clock::Global {
533 &self.saved_version
534 }
535
536 pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
537 self.saved_version_fingerprint
538 }
539
540 pub fn saved_mtime(&self) -> SystemTime {
541 self.saved_mtime
542 }
543
544 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
545 self.syntax_map.lock().clear();
546 self.language = language;
547 self.reparse(cx);
548 cx.emit(Event::LanguageChanged);
549 }
550
551 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
552 self.syntax_map
553 .lock()
554 .set_language_registry(language_registry);
555 }
556
557 pub fn did_save(
558 &mut self,
559 version: clock::Global,
560 fingerprint: RopeFingerprint,
561 mtime: SystemTime,
562 cx: &mut ModelContext<Self>,
563 ) {
564 self.saved_version = version;
565 self.saved_version_fingerprint = fingerprint;
566 self.saved_mtime = mtime;
567 cx.emit(Event::Saved);
568 cx.notify();
569 }
570
571 pub fn reload(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<Option<Transaction>>> {
572 cx.spawn(|this, mut cx| async move {
573 if let Some((new_mtime, new_text)) = this.read_with(&cx, |this, cx| {
574 let file = this.file.as_ref()?.as_local()?;
575 Some((file.mtime(), file.load(cx)))
576 }) {
577 let new_text = new_text.await?;
578 let diff = this
579 .read_with(&cx, |this, cx| this.diff(new_text, cx))
580 .await;
581 this.update(&mut cx, |this, cx| {
582 if this.version() == diff.base_version {
583 this.finalize_last_transaction();
584 this.apply_diff(diff, cx);
585 if let Some(transaction) = this.finalize_last_transaction().cloned() {
586 this.did_reload(
587 this.version(),
588 this.as_rope().fingerprint(),
589 this.line_ending(),
590 new_mtime,
591 cx,
592 );
593 return Ok(Some(transaction));
594 }
595 }
596 Ok(None)
597 })
598 } else {
599 Ok(None)
600 }
601 })
602 }
603
604 pub fn did_reload(
605 &mut self,
606 version: clock::Global,
607 fingerprint: RopeFingerprint,
608 line_ending: LineEnding,
609 mtime: SystemTime,
610 cx: &mut ModelContext<Self>,
611 ) {
612 self.saved_version = version;
613 self.saved_version_fingerprint = fingerprint;
614 self.text.set_line_ending(line_ending);
615 self.saved_mtime = mtime;
616 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
617 file.buffer_reloaded(
618 self.remote_id(),
619 &self.saved_version,
620 self.saved_version_fingerprint,
621 self.line_ending(),
622 self.saved_mtime,
623 cx,
624 );
625 }
626 cx.emit(Event::Reloaded);
627 cx.notify();
628 }
629
630 pub fn file_updated(
631 &mut self,
632 new_file: Arc<dyn File>,
633 cx: &mut ModelContext<Self>,
634 ) -> Task<()> {
635 let mut file_changed = false;
636 let mut task = Task::ready(());
637
638 if let Some(old_file) = self.file.as_ref() {
639 if new_file.path() != old_file.path() {
640 file_changed = true;
641 }
642
643 if new_file.is_deleted() {
644 if !old_file.is_deleted() {
645 file_changed = true;
646 if !self.is_dirty() {
647 cx.emit(Event::DirtyChanged);
648 }
649 }
650 } else {
651 let new_mtime = new_file.mtime();
652 if new_mtime != old_file.mtime() {
653 file_changed = true;
654
655 if !self.is_dirty() {
656 let reload = self.reload(cx).log_err().map(drop);
657 task = cx.foreground().spawn(reload);
658 }
659 }
660 }
661 } else {
662 file_changed = true;
663 };
664
665 self.file = Some(new_file);
666 if file_changed {
667 self.file_update_count += 1;
668 cx.emit(Event::FileHandleChanged);
669 cx.notify();
670 }
671 task
672 }
673
674 pub fn diff_base(&self) -> Option<&str> {
675 self.diff_base.as_deref()
676 }
677
678 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
679 self.diff_base = diff_base;
680 self.git_diff_recalc(cx);
681 cx.emit(Event::DiffBaseChanged);
682 }
683
684 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
685 let diff_base = self.diff_base.clone()?; // TODO: Make this an Arc
686 let snapshot = self.snapshot();
687
688 let mut diff = self.git_diff.clone();
689 let diff = cx.background().spawn(async move {
690 diff.update(&diff_base, &snapshot).await;
691 diff
692 });
693
694 let handle = cx.weak_handle();
695 Some(cx.spawn_weak(|_, mut cx| async move {
696 let buffer_diff = diff.await;
697 if let Some(this) = handle.upgrade(&mut cx) {
698 this.update(&mut cx, |this, _| {
699 this.git_diff = buffer_diff;
700 this.git_diff_update_count += 1;
701 })
702 }
703 }))
704 }
705
706 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
707 cx.emit(Event::Closed);
708 }
709
710 pub fn language(&self) -> Option<&Arc<Language>> {
711 self.language.as_ref()
712 }
713
714 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
715 let offset = position.to_offset(self);
716 self.syntax_map
717 .lock()
718 .layers_for_range(offset..offset, &self.text)
719 .last()
720 .map(|info| info.language.clone())
721 .or_else(|| self.language.clone())
722 }
723
724 pub fn parse_count(&self) -> usize {
725 self.parse_count
726 }
727
728 pub fn selections_update_count(&self) -> usize {
729 self.selections_update_count
730 }
731
732 pub fn diagnostics_update_count(&self) -> usize {
733 self.diagnostics_update_count
734 }
735
736 pub fn file_update_count(&self) -> usize {
737 self.file_update_count
738 }
739
740 pub fn git_diff_update_count(&self) -> usize {
741 self.git_diff_update_count
742 }
743
744 #[cfg(any(test, feature = "test-support"))]
745 pub fn is_parsing(&self) -> bool {
746 self.parsing_in_background
747 }
748
749 pub fn contains_unknown_injections(&self) -> bool {
750 self.syntax_map.lock().contains_unknown_injections()
751 }
752
753 #[cfg(test)]
754 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
755 self.sync_parse_timeout = timeout;
756 }
757
758 /// Called after an edit to synchronize the buffer's main parse tree with
759 /// the buffer's new underlying state.
760 ///
761 /// Locks the syntax map and interpolates the edits since the last reparse
762 /// into the foreground syntax tree.
763 ///
764 /// Then takes a stable snapshot of the syntax map before unlocking it.
765 /// The snapshot with the interpolated edits is sent to a background thread,
766 /// where we ask Tree-sitter to perform an incremental parse.
767 ///
768 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
769 /// waiting on the parse to complete. If it completes within that time, we
770 /// proceed synchronously with the new syntax tree.
771 ///
772 /// If we time out waiting on the parse, we return with the interpolated tree
773 /// still in the foreground and spawn a second task that waits for the parse
774 /// to complete. When the background parse finishes, it calls back into the
775 /// main thread and assigns the new tree as the foreground parse state.
776 ///
777 /// If the buffer or grammar changed since the start of the background parse,
778 /// initiate an additional reparse recursively. To avoid concurrent parses
779 /// for the same buffer, we only initiate a new parse if we are not already
780 /// parsing in the background.
781 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
782 if self.parsing_in_background {
783 return;
784 }
785 let language = if let Some(language) = self.language.clone() {
786 language
787 } else {
788 return;
789 };
790
791 let text = self.text_snapshot();
792 let parsed_version = self.version();
793
794 let mut syntax_map = self.syntax_map.lock();
795 syntax_map.interpolate(&text);
796 let language_registry = syntax_map.language_registry();
797 let mut syntax_snapshot = syntax_map.snapshot();
798 drop(syntax_map);
799
800 let parse_task = cx.background().spawn({
801 let language = language.clone();
802 let language_registry = language_registry.clone();
803 async move {
804 syntax_snapshot.reparse(&text, language_registry, language);
805 syntax_snapshot
806 }
807 });
808
809 match cx
810 .background()
811 .block_with_timeout(self.sync_parse_timeout, parse_task)
812 {
813 Ok(new_syntax_snapshot) => {
814 self.did_finish_parsing(new_syntax_snapshot, cx);
815 return;
816 }
817 Err(parse_task) => {
818 self.parsing_in_background = true;
819 cx.spawn(move |this, mut cx| async move {
820 let new_syntax_map = parse_task.await;
821 this.update(&mut cx, move |this, cx| {
822 let grammar_changed =
823 this.language.as_ref().map_or(true, |current_language| {
824 !Arc::ptr_eq(&language, current_language)
825 });
826 let language_registry_changed = new_syntax_map
827 .contains_unknown_injections()
828 && language_registry.map_or(false, |registry| {
829 registry.version() != new_syntax_map.language_registry_version()
830 });
831 let parse_again = language_registry_changed
832 || grammar_changed
833 || this.version.changed_since(&parsed_version);
834 this.did_finish_parsing(new_syntax_map, cx);
835 this.parsing_in_background = false;
836 if parse_again {
837 this.reparse(cx);
838 }
839 });
840 })
841 .detach();
842 }
843 }
844 }
845
846 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
847 self.parse_count += 1;
848 self.syntax_map.lock().did_parse(syntax_snapshot);
849 self.request_autoindent(cx);
850 cx.emit(Event::Reparsed);
851 cx.notify();
852 }
853
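/// Replaces this buffer's diagnostics for the given language server and
/// broadcasts the change to other replicas as an `UpdateDiagnostics` operation.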
854 pub fn update_diagnostics(
855 &mut self,
856 server_id: LanguageServerId,
857 diagnostics: DiagnosticSet,
858 cx: &mut ModelContext<Self>,
859 ) {
860 let lamport_timestamp = self.text.lamport_clock.tick();
861 let op = Operation::UpdateDiagnostics {
862 server_id,
863 diagnostics: diagnostics.iter().cloned().collect(),
864 lamport_timestamp,
865 };
866 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
867 self.send_operation(op, cx);
868 }
869
870 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
871 if let Some(indent_sizes) = self.compute_autoindents() {
872 let indent_sizes = cx.background().spawn(indent_sizes);
873 match cx
874 .background()
875 .block_with_timeout(Duration::from_micros(500), indent_sizes)
876 {
877 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
878 Err(indent_sizes) => {
879 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
880 let indent_sizes = indent_sizes.await;
881 this.update(&mut cx, |this, cx| {
882 this.apply_autoindents(indent_sizes, cx);
883 });
884 }));
885 }
886 }
887 } else {
888 self.autoindent_requests.clear();
889 }
890 }
891
892 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
893 let max_rows_between_yields = 100;
894 let snapshot = self.snapshot();
895 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
896 return None;
897 }
898
899 let autoindent_requests = self.autoindent_requests.clone();
900 Some(async move {
901 let mut indent_sizes = BTreeMap::new();
902 for request in autoindent_requests {
903 // Resolve each edited range to its row in the current buffer and in the
904 // buffer before this batch of edits.
905 let mut row_ranges = Vec::new();
906 let mut old_to_new_rows = BTreeMap::new();
907 let mut language_indent_sizes_by_new_row = Vec::new();
908 for entry in &request.entries {
909 let position = entry.range.start;
910 let new_row = position.to_point(&snapshot).row;
911 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
912 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
913
914 if !entry.first_line_is_new {
915 let old_row = position.to_point(&request.before_edit).row;
916 old_to_new_rows.insert(old_row, new_row);
917 }
918 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
919 }
920
921 // Build a map containing the suggested indentation for each of the edited lines
922 // with respect to the state of the buffer before these edits. This map is keyed
923 // by the rows for these lines in the current state of the buffer.
924 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
925 let old_edited_ranges =
926 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
927 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
928 let mut language_indent_size = IndentSize::default();
929 for old_edited_range in old_edited_ranges {
930 let suggestions = request
931 .before_edit
932 .suggest_autoindents(old_edited_range.clone())
933 .into_iter()
934 .flatten();
935 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
936 if let Some(suggestion) = suggestion {
937 let new_row = *old_to_new_rows.get(&old_row).unwrap();
938
939 // Find the indent size based on the language for this row.
940 while let Some((row, size)) = language_indent_sizes.peek() {
941 if *row > new_row {
942 break;
943 }
944 language_indent_size = *size;
945 language_indent_sizes.next();
946 }
947
948 let suggested_indent = old_to_new_rows
949 .get(&suggestion.basis_row)
950 .and_then(|from_row| {
951 Some(old_suggestions.get(from_row).copied()?.0)
952 })
953 .unwrap_or_else(|| {
954 request
955 .before_edit
956 .indent_size_for_line(suggestion.basis_row)
957 })
958 .with_delta(suggestion.delta, language_indent_size);
959 old_suggestions
960 .insert(new_row, (suggested_indent, suggestion.within_error));
961 }
962 }
963 yield_now().await;
964 }
965
966 // In block mode, only compute indentation suggestions for the first line
967 // of each insertion. Otherwise, compute suggestions for every inserted line.
968 let new_edited_row_ranges = contiguous_ranges(
969 row_ranges.iter().flat_map(|(range, _)| {
970 if request.is_block_mode {
971 range.start..range.start + 1
972 } else {
973 range.clone()
974 }
975 }),
976 max_rows_between_yields,
977 );
978
979 // Compute new suggestions for each line, but only include them in the result
980 // if they differ from the old suggestion for that line.
981 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
982 let mut language_indent_size = IndentSize::default();
983 for new_edited_row_range in new_edited_row_ranges {
984 let suggestions = snapshot
985 .suggest_autoindents(new_edited_row_range.clone())
986 .into_iter()
987 .flatten();
988 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
989 if let Some(suggestion) = suggestion {
990 // Find the indent size based on the language for this row.
991 while let Some((row, size)) = language_indent_sizes.peek() {
992 if *row > new_row {
993 break;
994 }
995 language_indent_size = *size;
996 language_indent_sizes.next();
997 }
998
999 let suggested_indent = indent_sizes
1000 .get(&suggestion.basis_row)
1001 .copied()
1002 .unwrap_or_else(|| {
1003 snapshot.indent_size_for_line(suggestion.basis_row)
1004 })
1005 .with_delta(suggestion.delta, language_indent_size);
1006 if old_suggestions.get(&new_row).map_or(
1007 true,
1008 |(old_indentation, was_within_error)| {
1009 suggested_indent != *old_indentation
1010 && (!suggestion.within_error || *was_within_error)
1011 },
1012 ) {
1013 indent_sizes.insert(new_row, suggested_indent);
1014 }
1015 }
1016 }
1017 yield_now().await;
1018 }
1019
1020 // For each block of inserted text, adjust the indentation of the remaining
1021 // lines of the block by the same amount as the first line was adjusted.
1022 if request.is_block_mode {
1023 for (row_range, original_indent_column) in
1024 row_ranges
1025 .into_iter()
1026 .filter_map(|(range, original_indent_column)| {
1027 if range.len() > 1 {
1028 Some((range, original_indent_column?))
1029 } else {
1030 None
1031 }
1032 })
1033 {
1034 let new_indent = indent_sizes
1035 .get(&row_range.start)
1036 .copied()
1037 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1038 let delta = new_indent.len as i64 - original_indent_column as i64;
1039 if delta != 0 {
1040 for row in row_range.skip(1) {
1041 indent_sizes.entry(row).or_insert_with(|| {
1042 let mut size = snapshot.indent_size_for_line(row);
1043 if size.kind == new_indent.kind {
1044 match delta.cmp(&0) {
1045 Ordering::Greater => size.len += delta as u32,
1046 Ordering::Less => {
1047 size.len = size.len.saturating_sub(-delta as u32)
1048 }
1049 Ordering::Equal => {}
1050 }
1051 }
1052 size
1053 });
1054 }
1055 }
1056 }
1057 }
1058 }
1059
1060 indent_sizes
1061 })
1062 }
1063
1064 fn apply_autoindents(
1065 &mut self,
1066 indent_sizes: BTreeMap<u32, IndentSize>,
1067 cx: &mut ModelContext<Self>,
1068 ) {
1069 self.autoindent_requests.clear();
1070
1071 let edits: Vec<_> = indent_sizes
1072 .into_iter()
1073 .filter_map(|(row, indent_size)| {
1074 let current_size = indent_size_for_line(self, row);
1075 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1076 })
1077 .collect();
1078
1079 self.edit(edits, None, cx);
1080 }
1081
1082 // Create a minimal edit that will cause the given row to be indented
1083 // with the given size. After applying this edit, the length of the line
1084 // will always be at least `new_size.len`.
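//
// For example (illustrative values, not from the original source), raising a
// row that is currently indented with two spaces to an indent of four spaces
// yields an insertion of two more spaces at the start of the line:
//
//     Buffer::edit_for_indent_size_adjustment(
//         0,
//         IndentSize::spaces(2),
//         IndentSize::spaces(4),
//     )
//     // => Some((Point::new(0, 0)..Point::new(0, 0), "  ".to_string()))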
1085 pub fn edit_for_indent_size_adjustment(
1086 row: u32,
1087 current_size: IndentSize,
1088 new_size: IndentSize,
1089 ) -> Option<(Range<Point>, String)> {
1090 if new_size.kind != current_size.kind {
1091 Some((
1092 Point::new(row, 0)..Point::new(row, current_size.len),
1093 iter::repeat(new_size.char())
1094 .take(new_size.len as usize)
1095 .collect::<String>(),
1096 ))
1097 } else {
1098 match new_size.len.cmp(&current_size.len) {
1099 Ordering::Greater => {
1100 let point = Point::new(row, 0);
1101 Some((
1102 point..point,
1103 iter::repeat(new_size.char())
1104 .take((new_size.len - current_size.len) as usize)
1105 .collect::<String>(),
1106 ))
1107 }
1108
1109 Ordering::Less => Some((
1110 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1111 String::new(),
1112 )),
1113
1114 Ordering::Equal => None,
1115 }
1116 }
1117 }
1118
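/// Spawns a background task that computes a character-wise diff between the
/// buffer's current text and `new_text`, returning a `Diff` that can later be
/// applied with `apply_diff`.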
1119 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1120 let old_text = self.as_rope().clone();
1121 let base_version = self.version();
1122 cx.background().spawn(async move {
1123 let old_text = old_text.to_string();
1124 let line_ending = LineEnding::detect(&new_text);
1125 LineEnding::normalize(&mut new_text);
1126 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1127 let mut edits = Vec::new();
1128 let mut offset = 0;
1129 let empty: Arc<str> = "".into();
1130 for change in diff.iter_all_changes() {
1131 let value = change.value();
1132 let end_offset = offset + value.len();
1133 match change.tag() {
1134 ChangeTag::Equal => {
1135 offset = end_offset;
1136 }
1137 ChangeTag::Delete => {
1138 edits.push((offset..end_offset, empty.clone()));
1139 offset = end_offset;
1140 }
1141 ChangeTag::Insert => {
1142 edits.push((offset..offset, value.into()));
1143 }
1144 }
1145 }
1146 Diff {
1147 base_version,
1148 line_ending,
1149 edits,
1150 }
1151 })
1152 }
1153
1154 /// Spawn a background task that searches the buffer for any whitespace
1155 /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1156 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1157 let old_text = self.as_rope().clone();
1158 let line_ending = self.line_ending();
1159 let base_version = self.version();
1160 cx.background().spawn(async move {
1161 let ranges = trailing_whitespace_ranges(&old_text);
1162 let empty = Arc::<str>::from("");
1163 Diff {
1164 base_version,
1165 line_ending,
1166 edits: ranges
1167 .into_iter()
1168 .map(|range| (range, empty.clone()))
1169 .collect(),
1170 }
1171 })
1172 }
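
// Illustrative pattern (mirroring `reload` above; `this` is assumed to be a
// buffer handle inside a spawned task): the returned `Diff` is computed against
// a snapshot of the buffer, so it is awaited and then applied with `apply_diff`,
// which discards any hunks that conflict with edits made in the meantime.
//
//     let diff = this
//         .read_with(&cx, |this, cx| this.remove_trailing_whitespace(cx))
//         .await;
//     this.update(&mut cx, |this, cx| this.apply_diff(diff, cx));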
1173
1174 /// Ensure that the buffer ends with a single newline character, and
1175 /// no other whitespace.
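///
/// For example (illustrative): a buffer containing `"fn main() {}\n\n  "` ends up
/// as `"fn main() {}\n"`, while a buffer that already ends in exactly one newline
/// is left unchanged.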
1176 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1177 let len = self.len();
1178 let mut offset = len;
1179 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1180 let non_whitespace_len = chunk
1181 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1182 .len();
1183 offset -= chunk.len();
1184 offset += non_whitespace_len;
1185 if non_whitespace_len != 0 {
1186 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1187 return;
1188 }
1189 break;
1190 }
1191 }
1192 self.edit([(offset..len, "\n")], None, cx);
1193 }
1194
1195 /// Apply a diff to the buffer. If the buffer has changed since the given diff was
1196 /// calculated, then adjust the diff to account for those changes, and discard any
1197 /// parts of the diff that conflict with those changes.
1198 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1199 // Check for any edits to the buffer that have occurred since this diff
1200 // was computed.
1201 let snapshot = self.snapshot();
1202 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1203 let mut delta = 0;
1204 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1205 while let Some(edit_since) = edits_since.peek() {
1206 // If the edit occurs after a diff hunk, then it does not
1207 // affect that hunk.
1208 if edit_since.old.start > range.end {
1209 break;
1210 }
1211 // If the edit precedes the diff hunk, then adjust the hunk
1212 // to reflect the edit.
1213 else if edit_since.old.end < range.start {
1214 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1215 edits_since.next();
1216 }
1217 // If the edit intersects a diff hunk, then discard that hunk.
1218 else {
1219 return None;
1220 }
1221 }
1222
1223 let start = (range.start as i64 + delta) as usize;
1224 let end = (range.end as i64 + delta) as usize;
1225 Some((start..end, new_text))
1226 });
1227
1228 self.start_transaction();
1229 self.text.set_line_ending(diff.line_ending);
1230 self.edit(adjusted_edits, None, cx);
1231 self.end_transaction(cx)
1232 }
1233
1234 pub fn is_dirty(&self) -> bool {
1235 self.saved_version_fingerprint != self.as_rope().fingerprint()
1236 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1237 }
1238
1239 pub fn has_conflict(&self) -> bool {
1240 self.saved_version_fingerprint != self.as_rope().fingerprint()
1241 && self
1242 .file
1243 .as_ref()
1244 .map_or(false, |file| file.mtime() > self.saved_mtime)
1245 }
1246
1247 pub fn subscribe(&mut self) -> Subscription {
1248 self.text.subscribe()
1249 }
1250
1251 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1252 self.start_transaction_at(Instant::now())
1253 }
1254
1255 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1256 self.transaction_depth += 1;
1257 if self.was_dirty_before_starting_transaction.is_none() {
1258 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1259 }
1260 self.text.start_transaction_at(now)
1261 }
1262
1263 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1264 self.end_transaction_at(Instant::now(), cx)
1265 }
1266
1267 pub fn end_transaction_at(
1268 &mut self,
1269 now: Instant,
1270 cx: &mut ModelContext<Self>,
1271 ) -> Option<TransactionId> {
1272 assert!(self.transaction_depth > 0);
1273 self.transaction_depth -= 1;
1274 let was_dirty = if self.transaction_depth == 0 {
1275 self.was_dirty_before_starting_transaction.take().unwrap()
1276 } else {
1277 false
1278 };
1279 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1280 self.did_edit(&start_version, was_dirty, cx);
1281 Some(transaction_id)
1282 } else {
1283 None
1284 }
1285 }
1286
1287 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1288 self.text.push_transaction(transaction, now);
1289 }
1290
1291 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1292 self.text.finalize_last_transaction()
1293 }
1294
1295 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1296 self.text.group_until_transaction(transaction_id);
1297 }
1298
1299 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1300 self.text.forget_transaction(transaction_id);
1301 }
1302
1303 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1304 self.text.merge_transactions(transaction, destination);
1305 }
1306
1307 pub fn wait_for_edits(
1308 &mut self,
1309 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1310 ) -> impl Future<Output = Result<()>> {
1311 self.text.wait_for_edits(edit_ids)
1312 }
1313
1314 pub fn wait_for_anchors(
1315 &mut self,
1316 anchors: impl IntoIterator<Item = Anchor>,
1317 ) -> impl 'static + Future<Output = Result<()>> {
1318 self.text.wait_for_anchors(anchors)
1319 }
1320
1321 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1322 self.text.wait_for_version(version)
1323 }
1324
1325 pub fn give_up_waiting(&mut self) {
1326 self.text.give_up_waiting();
1327 }
1328
1329 pub fn set_active_selections(
1330 &mut self,
1331 selections: Arc<[Selection<Anchor>]>,
1332 line_mode: bool,
1333 cursor_shape: CursorShape,
1334 cx: &mut ModelContext<Self>,
1335 ) {
1336 let lamport_timestamp = self.text.lamport_clock.tick();
1337 self.remote_selections.insert(
1338 self.text.replica_id(),
1339 SelectionSet {
1340 selections: selections.clone(),
1341 lamport_timestamp,
1342 line_mode,
1343 cursor_shape,
1344 },
1345 );
1346 self.send_operation(
1347 Operation::UpdateSelections {
1348 selections,
1349 line_mode,
1350 lamport_timestamp,
1351 cursor_shape,
1352 },
1353 cx,
1354 );
1355 }
1356
1357 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1358 if self
1359 .remote_selections
1360 .get(&self.text.replica_id())
1361 .map_or(true, |set| !set.selections.is_empty())
1362 {
1363 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1364 }
1365 }
1366
1367 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1368 where
1369 T: Into<Arc<str>>,
1370 {
1371 self.autoindent_requests.clear();
1372 self.edit([(0..self.len(), text)], None, cx)
1373 }
1374
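/// Applies a batch of edits to the buffer. Each edit is a `(range, new_text)`
/// pair whose range is resolved against the buffer as it was *before* any edit
/// in the batch was applied. Optionally requests auto-indentation for the
/// edited lines.
///
/// Illustrative use (hypothetical offsets): insert `"foo"` at offset 3 and
/// delete the text between offsets 10 and 14 in a single transaction:
///
///     buffer.edit([(3..3, "foo"), (10..14, "")], None, cx);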
1375 pub fn edit<I, S, T>(
1376 &mut self,
1377 edits_iter: I,
1378 autoindent_mode: Option<AutoindentMode>,
1379 cx: &mut ModelContext<Self>,
1380 ) -> Option<clock::Lamport>
1381 where
1382 I: IntoIterator<Item = (Range<S>, T)>,
1383 S: ToOffset,
1384 T: Into<Arc<str>>,
1385 {
1386 // Skip invalid edits and coalesce contiguous ones.
1387 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1388 for (range, new_text) in edits_iter {
1389 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1390 if range.start > range.end {
1391 mem::swap(&mut range.start, &mut range.end);
1392 }
1393 let new_text = new_text.into();
1394 if !new_text.is_empty() || !range.is_empty() {
1395 if let Some((prev_range, prev_text)) = edits.last_mut() {
1396 if prev_range.end >= range.start {
1397 prev_range.end = cmp::max(prev_range.end, range.end);
1398 *prev_text = format!("{prev_text}{new_text}").into();
1399 } else {
1400 edits.push((range, new_text));
1401 }
1402 } else {
1403 edits.push((range, new_text));
1404 }
1405 }
1406 }
1407 if edits.is_empty() {
1408 return None;
1409 }
1410
1411 self.start_transaction();
1412 self.pending_autoindent.take();
1413 let autoindent_request = autoindent_mode
1414 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1415
1416 let edit_operation = self.text.edit(edits.iter().cloned());
1417 let edit_id = edit_operation.timestamp();
1418
1419 if let Some((before_edit, mode)) = autoindent_request {
1420 let mut delta = 0isize;
1421 let entries = edits
1422 .into_iter()
1423 .enumerate()
1424 .zip(&edit_operation.as_edit().unwrap().new_text)
1425 .map(|((ix, (range, _)), new_text)| {
1426 let new_text_length = new_text.len();
1427 let old_start = range.start.to_point(&before_edit);
1428 let new_start = (delta + range.start as isize) as usize;
1429 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1430
1431 let mut range_of_insertion_to_indent = 0..new_text_length;
1432 let mut first_line_is_new = false;
1433 let mut original_indent_column = None;
1434
1435 // When inserting an entire line at the beginning of an existing line,
1436 // treat the insertion as new.
1437 if new_text.contains('\n')
1438 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1439 {
1440 first_line_is_new = true;
1441 }
1442
1443 // When inserting text starting with a newline, avoid auto-indenting the
1444 // previous line.
1445 if new_text.starts_with('\n') {
1446 range_of_insertion_to_indent.start += 1;
1447 first_line_is_new = true;
1448 }
1449
1450 // Avoid auto-indenting after the insertion.
1451 if let AutoindentMode::Block {
1452 original_indent_columns,
1453 } = &mode
1454 {
1455 original_indent_column =
1456 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1457 indent_size_for_text(
1458 new_text[range_of_insertion_to_indent.clone()].chars(),
1459 )
1460 .len
1461 }));
1462 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1463 range_of_insertion_to_indent.end -= 1;
1464 }
1465 }
1466
1467 AutoindentRequestEntry {
1468 first_line_is_new,
1469 original_indent_column,
1470 indent_size: before_edit.language_indent_size_at(range.start, cx),
1471 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1472 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1473 }
1474 })
1475 .collect();
1476
1477 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1478 before_edit,
1479 entries,
1480 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1481 }));
1482 }
1483
1484 self.end_transaction(cx);
1485 self.send_operation(Operation::Buffer(edit_operation), cx);
1486 Some(edit_id)
1487 }
1488
1489 fn did_edit(
1490 &mut self,
1491 old_version: &clock::Global,
1492 was_dirty: bool,
1493 cx: &mut ModelContext<Self>,
1494 ) {
1495 if self.edits_since::<usize>(old_version).next().is_none() {
1496 return;
1497 }
1498
1499 self.reparse(cx);
1500
1501 cx.emit(Event::Edited);
1502 if was_dirty != self.is_dirty() {
1503 cx.emit(Event::DirtyChanged);
1504 }
1505 cx.notify();
1506 }
1507
1508 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1509 &mut self,
1510 ops: I,
1511 cx: &mut ModelContext<Self>,
1512 ) -> Result<()> {
1513 self.pending_autoindent.take();
1514 let was_dirty = self.is_dirty();
1515 let old_version = self.version.clone();
1516 let mut deferred_ops = Vec::new();
1517 let buffer_ops = ops
1518 .into_iter()
1519 .filter_map(|op| match op {
1520 Operation::Buffer(op) => Some(op),
1521 _ => {
1522 if self.can_apply_op(&op) {
1523 self.apply_op(op, cx);
1524 } else {
1525 deferred_ops.push(op);
1526 }
1527 None
1528 }
1529 })
1530 .collect::<Vec<_>>();
1531 self.text.apply_ops(buffer_ops)?;
1532 self.deferred_ops.insert(deferred_ops);
1533 self.flush_deferred_ops(cx);
1534 self.did_edit(&old_version, was_dirty, cx);
1535 // Notify independently of whether the buffer was edited as the operations could include a
1536 // selection update.
1537 cx.notify();
1538 Ok(())
1539 }
1540
1541 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1542 let mut deferred_ops = Vec::new();
1543 for op in self.deferred_ops.drain().iter().cloned() {
1544 if self.can_apply_op(&op) {
1545 self.apply_op(op, cx);
1546 } else {
1547 deferred_ops.push(op);
1548 }
1549 }
1550 self.deferred_ops.insert(deferred_ops);
1551 }
1552
1553 fn can_apply_op(&self, operation: &Operation) -> bool {
1554 match operation {
1555 Operation::Buffer(_) => {
1556 unreachable!("buffer operations should never be applied at this layer")
1557 }
1558 Operation::UpdateDiagnostics {
1559 diagnostics: diagnostic_set,
1560 ..
1561 } => diagnostic_set.iter().all(|diagnostic| {
1562 self.text.can_resolve(&diagnostic.range.start)
1563 && self.text.can_resolve(&diagnostic.range.end)
1564 }),
1565 Operation::UpdateSelections { selections, .. } => selections
1566 .iter()
1567 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1568 Operation::UpdateCompletionTriggers { .. } => true,
1569 }
1570 }
1571
1572 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1573 match operation {
1574 Operation::Buffer(_) => {
1575 unreachable!("buffer operations should never be applied at this layer")
1576 }
1577 Operation::UpdateDiagnostics {
1578 server_id,
1579 diagnostics: diagnostic_set,
1580 lamport_timestamp,
1581 } => {
1582 let snapshot = self.snapshot();
1583 self.apply_diagnostic_update(
1584 server_id,
1585 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1586 lamport_timestamp,
1587 cx,
1588 );
1589 }
1590 Operation::UpdateSelections {
1591 selections,
1592 lamport_timestamp,
1593 line_mode,
1594 cursor_shape,
1595 } => {
1596 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1597 if set.lamport_timestamp > lamport_timestamp {
1598 return;
1599 }
1600 }
1601
1602 self.remote_selections.insert(
1603 lamport_timestamp.replica_id,
1604 SelectionSet {
1605 selections,
1606 lamport_timestamp,
1607 line_mode,
1608 cursor_shape,
1609 },
1610 );
1611 self.text.lamport_clock.observe(lamport_timestamp);
1612 self.selections_update_count += 1;
1613 }
1614 Operation::UpdateCompletionTriggers {
1615 triggers,
1616 lamport_timestamp,
1617 } => {
1618 self.completion_triggers = triggers;
1619 self.text.lamport_clock.observe(lamport_timestamp);
1620 }
1621 }
1622 }
1623
1624 fn apply_diagnostic_update(
1625 &mut self,
1626 server_id: LanguageServerId,
1627 diagnostics: DiagnosticSet,
1628 lamport_timestamp: clock::Lamport,
1629 cx: &mut ModelContext<Self>,
1630 ) {
1631 if lamport_timestamp > self.diagnostics_timestamp {
1632 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1633 if diagnostics.len() == 0 {
1634 if let Ok(ix) = ix {
1635 self.diagnostics.remove(ix);
1636 }
1637 } else {
1638 match ix {
1639 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1640 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1641 };
1642 }
1643 self.diagnostics_timestamp = lamport_timestamp;
1644 self.diagnostics_update_count += 1;
1645 self.text.lamport_clock.observe(lamport_timestamp);
1646 cx.notify();
1647 cx.emit(Event::DiagnosticsUpdated);
1648 }
1649 }
1650
1651 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1652 cx.emit(Event::Operation(operation));
1653 }
1654
1655 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1656 self.remote_selections.remove(&replica_id);
1657 cx.notify();
1658 }
1659
1660 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1661 let was_dirty = self.is_dirty();
1662 let old_version = self.version.clone();
1663
1664 if let Some((transaction_id, operation)) = self.text.undo() {
1665 self.send_operation(Operation::Buffer(operation), cx);
1666 self.did_edit(&old_version, was_dirty, cx);
1667 Some(transaction_id)
1668 } else {
1669 None
1670 }
1671 }
1672
1673 pub fn undo_transaction(
1674 &mut self,
1675 transaction_id: TransactionId,
1676 cx: &mut ModelContext<Self>,
1677 ) -> bool {
1678 let was_dirty = self.is_dirty();
1679 let old_version = self.version.clone();
1680 if let Some(operation) = self.text.undo_transaction(transaction_id) {
1681 self.send_operation(Operation::Buffer(operation), cx);
1682 self.did_edit(&old_version, was_dirty, cx);
1683 true
1684 } else {
1685 false
1686 }
1687 }
1688
1689 pub fn undo_to_transaction(
1690 &mut self,
1691 transaction_id: TransactionId,
1692 cx: &mut ModelContext<Self>,
1693 ) -> bool {
1694 let was_dirty = self.is_dirty();
1695 let old_version = self.version.clone();
1696
1697 let operations = self.text.undo_to_transaction(transaction_id);
1698 let undone = !operations.is_empty();
1699 for operation in operations {
1700 self.send_operation(Operation::Buffer(operation), cx);
1701 }
1702 if undone {
1703 self.did_edit(&old_version, was_dirty, cx)
1704 }
1705 undone
1706 }
1707
1708 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1709 let was_dirty = self.is_dirty();
1710 let old_version = self.version.clone();
1711
1712 if let Some((transaction_id, operation)) = self.text.redo() {
1713 self.send_operation(Operation::Buffer(operation), cx);
1714 self.did_edit(&old_version, was_dirty, cx);
1715 Some(transaction_id)
1716 } else {
1717 None
1718 }
1719 }
1720
1721 pub fn redo_to_transaction(
1722 &mut self,
1723 transaction_id: TransactionId,
1724 cx: &mut ModelContext<Self>,
1725 ) -> bool {
1726 let was_dirty = self.is_dirty();
1727 let old_version = self.version.clone();
1728
1729 let operations = self.text.redo_to_transaction(transaction_id);
1730 let redone = !operations.is_empty();
1731 for operation in operations {
1732 self.send_operation(Operation::Buffer(operation), cx);
1733 }
1734 if redone {
1735 self.did_edit(&old_version, was_dirty, cx)
1736 }
1737 redone
1738 }
1739
1740 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
1741 self.completion_triggers = triggers.clone();
1742 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
1743 self.send_operation(
1744 Operation::UpdateCompletionTriggers {
1745 triggers,
1746 lamport_timestamp: self.completion_triggers_timestamp,
1747 },
1748 cx,
1749 );
1750 cx.notify();
1751 }
1752
1753 pub fn completion_triggers(&self) -> &[String] {
1754 &self.completion_triggers
1755 }
1756}
1757
1758#[cfg(any(test, feature = "test-support"))]
1759impl Buffer {
1760 pub fn edit_via_marked_text(
1761 &mut self,
1762 marked_string: &str,
1763 autoindent_mode: Option<AutoindentMode>,
1764 cx: &mut ModelContext<Self>,
1765 ) {
1766 let edits = self.edits_for_marked_text(marked_string);
1767 self.edit(edits, autoindent_mode, cx);
1768 }
1769
1770 pub fn set_group_interval(&mut self, group_interval: Duration) {
1771 self.text.set_group_interval(group_interval);
1772 }
1773
1774 pub fn randomly_edit<T>(
1775 &mut self,
1776 rng: &mut T,
1777 old_range_count: usize,
1778 cx: &mut ModelContext<Self>,
1779 ) where
1780 T: rand::Rng,
1781 {
1782 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
1783 let mut last_end = None;
1784 for _ in 0..old_range_count {
1785 if last_end.map_or(false, |last_end| last_end >= self.len()) {
1786 break;
1787 }
1788
1789 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1790 let mut range = self.random_byte_range(new_start, rng);
1791 if rng.gen_bool(0.2) {
1792 mem::swap(&mut range.start, &mut range.end);
1793 }
1794 last_end = Some(range.end);
1795
1796 let new_text_len = rng.gen_range(0..10);
1797 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1798
1799 edits.push((range, new_text));
1800 }
1801 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
1802 self.edit(edits, None, cx);
1803 }
1804
1805 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1806 let was_dirty = self.is_dirty();
1807 let old_version = self.version.clone();
1808
1809 let ops = self.text.randomly_undo_redo(rng);
1810 if !ops.is_empty() {
1811 for op in ops {
1812 self.send_operation(Operation::Buffer(op), cx);
1813 self.did_edit(&old_version, was_dirty, cx);
1814 }
1815 }
1816 }
1817}
1818
1819impl Entity for Buffer {
1820 type Event = Event;
1821}
1822
1823impl Deref for Buffer {
1824 type Target = TextBuffer;
1825
1826 fn deref(&self) -> &Self::Target {
1827 &self.text
1828 }
1829}
1830
1831impl BufferSnapshot {
1832 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
1833 indent_size_for_line(self, row)
1834 }
1835
1836 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
1837 let settings = language_settings(self.language_at(position), self.file(), cx);
1838 if settings.hard_tabs {
1839 IndentSize::tab()
1840 } else {
1841 IndentSize::spaces(settings.tab_size.get())
1842 }
1843 }
1844
1845 pub fn suggested_indents(
1846 &self,
1847 rows: impl Iterator<Item = u32>,
1848 single_indent_size: IndentSize,
1849 ) -> BTreeMap<u32, IndentSize> {
1850 let mut result = BTreeMap::new();
1851
1852 for row_range in contiguous_ranges(rows, 10) {
1853 let suggestions = match self.suggest_autoindents(row_range.clone()) {
1854 Some(suggestions) => suggestions,
1855 _ => break,
1856 };
1857
1858 for (row, suggestion) in row_range.zip(suggestions) {
1859 let indent_size = if let Some(suggestion) = suggestion {
1860 result
1861 .get(&suggestion.basis_row)
1862 .copied()
1863 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
1864 .with_delta(suggestion.delta, single_indent_size)
1865 } else {
1866 self.indent_size_for_line(row)
1867 };
1868
1869 result.insert(row, indent_size);
1870 }
1871 }
1872
1873 result
1874 }
1875
1876 fn suggest_autoindents(
1877 &self,
1878 row_range: Range<u32>,
1879 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
1880 let config = &self.language.as_ref()?.config;
1881 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1882
1883 // Find the suggested indentation ranges based on the syntax tree.
1884 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
1885 let end = Point::new(row_range.end, 0);
1886 let range = (start..end).to_offset(&self.text);
1887 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1888 Some(&grammar.indents_config.as_ref()?.query)
1889 });
1890 let indent_configs = matches
1891 .grammars()
1892 .iter()
1893 .map(|grammar| grammar.indents_config.as_ref().unwrap())
1894 .collect::<Vec<_>>();
1895
1896 let mut indent_ranges = Vec::<Range<Point>>::new();
1897 let mut outdent_positions = Vec::<Point>::new();
1898 while let Some(mat) = matches.peek() {
1899 let mut start: Option<Point> = None;
1900 let mut end: Option<Point> = None;
1901
1902 let config = &indent_configs[mat.grammar_index];
1903 for capture in mat.captures {
1904 if capture.index == config.indent_capture_ix {
1905 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1906 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1907 } else if Some(capture.index) == config.start_capture_ix {
1908 start = Some(Point::from_ts_point(capture.node.end_position()));
1909 } else if Some(capture.index) == config.end_capture_ix {
1910 end = Some(Point::from_ts_point(capture.node.start_position()));
1911 } else if Some(capture.index) == config.outdent_capture_ix {
1912 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
1913 }
1914 }
1915
1916 matches.advance();
1917 if let Some((start, end)) = start.zip(end) {
1918 if start.row == end.row {
1919 continue;
1920 }
1921
1922 let range = start..end;
1923 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
1924 Err(ix) => indent_ranges.insert(ix, range),
1925 Ok(ix) => {
1926 let prev_range = &mut indent_ranges[ix];
1927 prev_range.end = prev_range.end.max(range.end);
1928 }
1929 }
1930 }
1931 }
1932
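        // Collect the ranges of syntax errors, so that suggestions falling
        // within them can be marked accordingly.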
1933 let mut error_ranges = Vec::<Range<Point>>::new();
1934 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1935 Some(&grammar.error_query)
1936 });
1937 while let Some(mat) = matches.peek() {
1938 let node = mat.captures[0].node;
1939 let start = Point::from_ts_point(node.start_position());
1940 let end = Point::from_ts_point(node.end_position());
1941 let range = start..end;
1942 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
1943 Ok(ix) | Err(ix) => ix,
1944 };
1945 let mut end_ix = ix;
1946 while let Some(existing_range) = error_ranges.get(end_ix) {
1947 if existing_range.end < end {
1948 end_ix += 1;
1949 } else {
1950 break;
1951 }
1952 }
1953 error_ranges.splice(ix..end_ix, [range]);
1954 matches.advance();
1955 }
1956
1957 outdent_positions.sort();
1958 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and truncate that range's end to the outdent position.
1961 if let Some(range_to_truncate) = indent_ranges
1962 .iter_mut()
1963 .filter(|indent_range| indent_range.contains(&outdent_position))
1964 .last()
1965 {
1966 range_to_truncate.end = outdent_position;
1967 }
1968 }
1969
        // Find the suggested indentation increases and decreases based on regexes.
1971 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
1972 self.for_each_line(
1973 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
1974 ..Point::new(row_range.end, 0),
1975 |row, line| {
1976 if config
1977 .decrease_indent_pattern
1978 .as_ref()
1979 .map_or(false, |regex| regex.is_match(line))
1980 {
1981 indent_change_rows.push((row, Ordering::Less));
1982 }
1983 if config
1984 .increase_indent_pattern
1985 .as_ref()
1986 .map_or(false, |regex| regex.is_match(line))
1987 {
1988 indent_change_rows.push((row + 1, Ordering::Greater));
1989 }
1990 },
1991 );
1992
1993 let mut indent_changes = indent_change_rows.into_iter().peekable();
1994 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
1995 prev_non_blank_row.unwrap_or(0)
1996 } else {
1997 row_range.start.saturating_sub(1)
1998 };
1999 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
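        // For each row, combine the regex-based indent changes with the
        // syntax-based indent ranges to decide whether the row should be
        // indented, outdented, or kept level relative to its basis row.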
2000 Some(row_range.map(move |row| {
2001 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2002
2003 let mut indent_from_prev_row = false;
2004 let mut outdent_from_prev_row = false;
2005 let mut outdent_to_row = u32::MAX;
2006
2007 while let Some((indent_row, delta)) = indent_changes.peek() {
2008 match indent_row.cmp(&row) {
2009 Ordering::Equal => match delta {
2010 Ordering::Less => outdent_from_prev_row = true,
2011 Ordering::Greater => indent_from_prev_row = true,
2012 _ => {}
2013 },
2014
2015 Ordering::Greater => break,
2016 Ordering::Less => {}
2017 }
2018
2019 indent_changes.next();
2020 }
2021
2022 for range in &indent_ranges {
2023 if range.start.row >= row {
2024 break;
2025 }
2026 if range.start.row == prev_row && range.end > row_start {
2027 indent_from_prev_row = true;
2028 }
2029 if range.end > prev_row_start && range.end <= row_start {
2030 outdent_to_row = outdent_to_row.min(range.start.row);
2031 }
2032 }
2033
2034 let within_error = error_ranges
2035 .iter()
2036 .any(|e| e.start.row < row && e.end > row_start);
2037
2038 let suggestion = if outdent_to_row == prev_row
2039 || (outdent_from_prev_row && indent_from_prev_row)
2040 {
2041 Some(IndentSuggestion {
2042 basis_row: prev_row,
2043 delta: Ordering::Equal,
2044 within_error,
2045 })
2046 } else if indent_from_prev_row {
2047 Some(IndentSuggestion {
2048 basis_row: prev_row,
2049 delta: Ordering::Greater,
2050 within_error,
2051 })
2052 } else if outdent_to_row < prev_row {
2053 Some(IndentSuggestion {
2054 basis_row: outdent_to_row,
2055 delta: Ordering::Equal,
2056 within_error,
2057 })
2058 } else if outdent_from_prev_row {
2059 Some(IndentSuggestion {
2060 basis_row: prev_row,
2061 delta: Ordering::Less,
2062 within_error,
2063 })
2064 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2065 {
2066 Some(IndentSuggestion {
2067 basis_row: prev_row,
2068 delta: Ordering::Equal,
2069 within_error,
2070 })
2071 } else {
2072 None
2073 };
2074
2075 prev_row = row;
2076 prev_row_start = row_start;
2077 suggestion
2078 }))
2079 }
2080
2081 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2082 while row > 0 {
2083 row -= 1;
2084 if !self.is_line_blank(row) {
2085 return Some(row);
2086 }
2087 }
2088 None
2089 }
2090
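    /// Returns an iterator over the text in the given range, split into chunks.
    /// When `language_aware` is true, each chunk also carries its syntax
    /// highlight id and the severity of any diagnostics overlapping it.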
2091 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2092 let range = range.start.to_offset(self)..range.end.to_offset(self);
2093
2094 let mut syntax = None;
2095 let mut diagnostic_endpoints = Vec::new();
2096 if language_aware {
2097 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2098 grammar.highlights_query.as_ref()
2099 });
2100 let highlight_maps = captures
2101 .grammars()
2102 .into_iter()
2103 .map(|grammar| grammar.highlight_map())
2104 .collect();
2105 syntax = Some((captures, highlight_maps));
2106 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2107 diagnostic_endpoints.push(DiagnosticEndpoint {
2108 offset: entry.range.start,
2109 is_start: true,
2110 severity: entry.diagnostic.severity,
2111 is_unnecessary: entry.diagnostic.is_unnecessary,
2112 });
2113 diagnostic_endpoints.push(DiagnosticEndpoint {
2114 offset: entry.range.end,
2115 is_start: false,
2116 severity: entry.diagnostic.severity,
2117 is_unnecessary: entry.diagnostic.is_unnecessary,
2118 });
2119 }
2120 diagnostic_endpoints
2121 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2122 }
2123
2124 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2125 }
2126
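    /// Invokes `callback` for each line in the given range, passing the row
    /// number and the portion of the line's text that falls within the range,
    /// without its trailing newline.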
2127 pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2128 let mut line = String::new();
2129 let mut row = range.start.row;
2130 for chunk in self
2131 .as_rope()
2132 .chunks_in_range(range.to_offset(self))
2133 .chain(["\n"])
2134 {
2135 for (newline_ix, text) in chunk.split('\n').enumerate() {
2136 if newline_ix > 0 {
2137 callback(row, &line);
2138 row += 1;
2139 line.clear();
2140 }
2141 line.push_str(text);
2142 }
2143 }
2144 }
2145
2146 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayerInfo> + '_ {
2147 self.syntax.layers_for_range(0..self.len(), &self.text)
2148 }
2149
2150 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayerInfo> {
2151 let offset = position.to_offset(self);
2152 self.syntax
2153 .layers_for_range(offset..offset, &self.text)
2154 .filter(|l| l.node().end_byte() > offset)
2155 .last()
2156 }
2157
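    /// Returns the language of the innermost syntax layer at the given
    /// position, falling back to the buffer's base language.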
2158 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2159 self.syntax_layer_at(position)
2160 .map(|info| info.language)
2161 .or(self.language.as_ref())
2162 }
2163
2164 pub fn settings_at<'a, D: ToOffset>(
2165 &self,
2166 position: D,
2167 cx: &'a AppContext,
2168 ) -> &'a LanguageSettings {
2169 language_settings(self.language_at(position), self.file.as_ref(), cx)
2170 }
2171
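    /// Returns the language scope at the given position, taken from the syntax
    /// layer whose smallest node containing the position is smallest across all
    /// layers, along with any override configuration in effect there. Falls
    /// back to the buffer's base language when no syntax layer matches.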
2172 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2173 let offset = position.to_offset(self);
2174 let mut scope = None;
2175 let mut smallest_range: Option<Range<usize>> = None;
2176
2177 // Use the layer that has the smallest node intersecting the given point.
2178 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2179 let mut cursor = layer.node().walk();
2180
2181 let mut range = None;
2182 loop {
2183 let child_range = cursor.node().byte_range();
2184 if !child_range.to_inclusive().contains(&offset) {
2185 break;
2186 }
2187
2188 range = Some(child_range);
2189 if cursor.goto_first_child_for_byte(offset).is_none() {
2190 break;
2191 }
2192 }
2193
2194 if let Some(range) = range {
2195 if smallest_range
2196 .as_ref()
2197 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2198 {
2199 smallest_range = Some(range);
2200 scope = Some(LanguageScope {
2201 language: layer.language.clone(),
2202 override_id: layer.override_id(offset, &self.text),
2203 });
2204 }
2205 }
2206 }
2207
2208 scope.or_else(|| {
2209 self.language.clone().map(|language| LanguageScope {
2210 language,
2211 override_id: None,
2212 })
2213 })
2214 }
2215
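    /// Returns the range of the word surrounding the given position, along with
    /// the kind of characters it consists of. The word is expanded in both
    /// directions over consecutive characters of the same kind, without
    /// crossing newlines.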
2216 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2217 let mut start = start.to_offset(self);
2218 let mut end = start;
2219 let mut next_chars = self.chars_at(start).peekable();
2220 let mut prev_chars = self.reversed_chars_at(start).peekable();
2221
2222 let scope = self.language_scope_at(start);
2223 let kind = |c| char_kind(&scope, c);
2224 let word_kind = cmp::max(
2225 prev_chars.peek().copied().map(kind),
2226 next_chars.peek().copied().map(kind),
2227 );
2228
2229 for ch in prev_chars {
2230 if Some(kind(ch)) == word_kind && ch != '\n' {
2231 start -= ch.len_utf8();
2232 } else {
2233 break;
2234 }
2235 }
2236
2237 for ch in next_chars {
2238 if Some(kind(ch)) == word_kind && ch != '\n' {
2239 end += ch.len_utf8();
2240 } else {
2241 break;
2242 }
2243 }
2244
2245 (start..end, word_kind)
2246 }
2247
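    /// Returns the byte range of the smallest syntax node that strictly
    /// contains the given range. When multiple syntax layers yield such a node,
    /// the smallest one wins.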
2248 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2249 let range = range.start.to_offset(self)..range.end.to_offset(self);
2250 let mut result: Option<Range<usize>> = None;
2251 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2252 let mut cursor = layer.node().walk();
2253
            // Descend to the first leaf that touches the start of the range and,
            // if the range is non-empty, extends beyond its start.
2256 while cursor.goto_first_child_for_byte(range.start).is_some() {
2257 if !range.is_empty() && cursor.node().end_byte() == range.start {
2258 cursor.goto_next_sibling();
2259 }
2260 }
2261
2262 // Ascend to the smallest ancestor that strictly contains the range.
2263 loop {
2264 let node_range = cursor.node().byte_range();
2265 if node_range.start <= range.start
2266 && node_range.end >= range.end
2267 && node_range.len() > range.len()
2268 {
2269 break;
2270 }
2271 if !cursor.goto_parent() {
2272 continue 'outer;
2273 }
2274 }
2275
2276 let left_node = cursor.node();
2277 let mut layer_result = left_node.byte_range();
2278
2279 // For an empty range, try to find another node immediately to the right of the range.
2280 if left_node.end_byte() == range.start {
2281 let mut right_node = None;
2282 while !cursor.goto_next_sibling() {
2283 if !cursor.goto_parent() {
2284 break;
2285 }
2286 }
2287
2288 while cursor.node().start_byte() == range.start {
2289 right_node = Some(cursor.node());
2290 if !cursor.goto_first_child() {
2291 break;
2292 }
2293 }
2294
2295 // If there is a candidate node on both sides of the (empty) range, then
2296 // decide between the two by favoring a named node over an anonymous token.
2297 // If both nodes are the same in that regard, favor the right one.
2298 if let Some(right_node) = right_node {
2299 if right_node.is_named() || !left_node.is_named() {
2300 layer_result = right_node.byte_range();
2301 }
2302 }
2303 }
2304
2305 if let Some(previous_result) = &result {
2306 if previous_result.len() < layer_result.len() {
2307 continue;
2308 }
2309 }
2310 result = Some(layer_result);
2311 }
2312
2313 result
2314 }
2315
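    /// Returns an outline of the entire buffer, built from the language's
    /// outline query. The optional theme is used to syntax-highlight the
    /// outline items' labels.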
2316 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2317 self.outline_items_containing(0..self.len(), true, theme)
2318 .map(Outline::new)
2319 }
2320
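    /// Returns the chain of outline items enclosing the given position, from
    /// the outermost symbol to the innermost.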
2321 pub fn symbols_containing<T: ToOffset>(
2322 &self,
2323 position: T,
2324 theme: Option<&SyntaxTheme>,
2325 ) -> Option<Vec<OutlineItem<Anchor>>> {
2326 let position = position.to_offset(self);
2327 let mut items = self.outline_items_containing(
2328 position.saturating_sub(1)..self.len().min(position + 1),
2329 false,
2330 theme,
2331 )?;
2332 let mut prev_depth = None;
2333 items.retain(|item| {
2334 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2335 prev_depth = Some(item.depth);
2336 result
2337 });
2338 Some(items)
2339 }
2340
2341 fn outline_items_containing(
2342 &self,
2343 range: Range<usize>,
2344 include_extra_context: bool,
2345 theme: Option<&SyntaxTheme>,
2346 ) -> Option<Vec<OutlineItem<Anchor>>> {
2347 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2348 grammar.outline_config.as_ref().map(|c| &c.query)
2349 });
2350 let configs = matches
2351 .grammars()
2352 .iter()
2353 .map(|g| g.outline_config.as_ref().unwrap())
2354 .collect::<Vec<_>>();
2355
2356 let mut stack = Vec::<Range<usize>>::new();
2357 let mut items = Vec::new();
2358 while let Some(mat) = matches.peek() {
2359 let config = &configs[mat.grammar_index];
2360 let item_node = mat.captures.iter().find_map(|cap| {
2361 if cap.index == config.item_capture_ix {
2362 Some(cap.node)
2363 } else {
2364 None
2365 }
2366 })?;
2367
2368 let item_range = item_node.byte_range();
2369 if item_range.end < range.start || item_range.start > range.end {
2370 matches.advance();
2371 continue;
2372 }
2373
2374 let mut buffer_ranges = Vec::new();
2375 for capture in mat.captures {
2376 let node_is_name;
2377 if capture.index == config.name_capture_ix {
2378 node_is_name = true;
2379 } else if Some(capture.index) == config.context_capture_ix
2380 || (Some(capture.index) == config.extra_context_capture_ix
2381 && include_extra_context)
2382 {
2383 node_is_name = false;
2384 } else {
2385 continue;
2386 }
2387
2388 let mut range = capture.node.start_byte()..capture.node.end_byte();
2389 let start = capture.node.start_position();
2390 if capture.node.end_position().row > start.row {
2391 range.end =
2392 range.start + self.line_len(start.row as u32) as usize - start.column;
2393 }
2394
2395 buffer_ranges.push((range, node_is_name));
2396 }
2397
            // Skip matches that yielded no name or context captures, advancing
            // so that the same match isn't peeked again.
            if buffer_ranges.is_empty() {
                matches.advance();
                continue;
            }
2401
2402 let mut text = String::new();
2403 let mut highlight_ranges = Vec::new();
2404 let mut name_ranges = Vec::new();
2405 let mut chunks = self.chunks(
2406 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2407 true,
2408 );
2409 let mut last_buffer_range_end = 0;
2410 for (buffer_range, is_name) in buffer_ranges {
2411 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2412 text.push(' ');
2413 }
2414 last_buffer_range_end = buffer_range.end;
2415 if is_name {
2416 let mut start = text.len();
2417 let end = start + buffer_range.len();
2418
                    // When multiple names are captured, the matchable text
                    // includes the whitespace between the names.
2421 if !name_ranges.is_empty() {
2422 start -= 1;
2423 }
2424
2425 name_ranges.push(start..end);
2426 }
2427
2428 let mut offset = buffer_range.start;
2429 chunks.seek(offset);
2430 for mut chunk in chunks.by_ref() {
2431 if chunk.text.len() > buffer_range.end - offset {
2432 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2433 offset = buffer_range.end;
2434 } else {
2435 offset += chunk.text.len();
2436 }
2437 let style = chunk
2438 .syntax_highlight_id
2439 .zip(theme)
2440 .and_then(|(highlight, theme)| highlight.style(theme));
2441 if let Some(style) = style {
2442 let start = text.len();
2443 let end = start + chunk.text.len();
2444 highlight_ranges.push((start..end, style));
2445 }
2446 text.push_str(chunk.text);
2447 if offset >= buffer_range.end {
2448 break;
2449 }
2450 }
2451 }
2452
2453 matches.advance();
2454 while stack.last().map_or(false, |prev_range| {
2455 prev_range.start > item_range.start || prev_range.end < item_range.end
2456 }) {
2457 stack.pop();
2458 }
2459 stack.push(item_range.clone());
2460
2461 items.push(OutlineItem {
2462 depth: stack.len() - 1,
2463 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2464 text,
2465 highlight_ranges,
2466 name_ranges,
2467 })
2468 }
2469 Some(items)
2470 }
2471
2472 pub fn matches(
2473 &self,
2474 range: Range<usize>,
2475 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2476 ) -> SyntaxMapMatches {
2477 self.syntax.matches(range, self, query)
2478 }
2479
2480 /// Returns bracket range pairs overlapping or adjacent to `range`
2481 pub fn bracket_ranges<'a, T: ToOffset>(
2482 &'a self,
2483 range: Range<T>,
2484 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a {
2485 // Find bracket pairs that *inclusively* contain the given range.
2486 let range = range.start.to_offset(self).saturating_sub(1)
2487 ..self.len().min(range.end.to_offset(self) + 1);
2488
2489 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2490 grammar.brackets_config.as_ref().map(|c| &c.query)
2491 });
2492 let configs = matches
2493 .grammars()
2494 .iter()
2495 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2496 .collect::<Vec<_>>();
2497
2498 iter::from_fn(move || {
2499 while let Some(mat) = matches.peek() {
2500 let mut open = None;
2501 let mut close = None;
2502 let config = &configs[mat.grammar_index];
2503 for capture in mat.captures {
2504 if capture.index == config.open_capture_ix {
2505 open = Some(capture.node.byte_range());
2506 } else if capture.index == config.close_capture_ix {
2507 close = Some(capture.node.byte_range());
2508 }
2509 }
2510
2511 matches.advance();
2512
2513 let Some((open, close)) = open.zip(close) else {
2514 continue;
2515 };
2516
2517 let bracket_range = open.start..=close.end;
2518 if !bracket_range.overlaps(&range) {
2519 continue;
2520 }
2521
2522 return Some((open, close));
2523 }
2524 None
2525 })
2526 }
2527
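    /// Returns, for each replica other than this one, the replica id, line
    /// mode, cursor shape, and the subset of its selections that intersect the
    /// given range.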
2528 #[allow(clippy::type_complexity)]
2529 pub fn remote_selections_in_range(
2530 &self,
2531 range: Range<Anchor>,
2532 ) -> impl Iterator<
2533 Item = (
2534 ReplicaId,
2535 bool,
2536 CursorShape,
2537 impl Iterator<Item = &Selection<Anchor>> + '_,
2538 ),
2539 > + '_ {
2540 self.remote_selections
2541 .iter()
2542 .filter(|(replica_id, set)| {
2543 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2544 })
2545 .map(move |(replica_id, set)| {
2546 let start_ix = match set.selections.binary_search_by(|probe| {
2547 probe.end.cmp(&range.start, self).then(Ordering::Greater)
2548 }) {
2549 Ok(ix) | Err(ix) => ix,
2550 };
2551 let end_ix = match set.selections.binary_search_by(|probe| {
2552 probe.start.cmp(&range.end, self).then(Ordering::Less)
2553 }) {
2554 Ok(ix) | Err(ix) => ix,
2555 };
2556
2557 (
2558 *replica_id,
2559 set.line_mode,
2560 set.cursor_shape,
2561 set.selections[start_ix..end_ix].iter(),
2562 )
2563 })
2564 }
2565
2566 pub fn git_diff_hunks_in_row_range<'a>(
2567 &'a self,
2568 range: Range<u32>,
2569 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2570 self.git_diff.hunks_in_row_range(range, self)
2571 }
2572
2573 pub fn git_diff_hunks_intersecting_range<'a>(
2574 &'a self,
2575 range: Range<Anchor>,
2576 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2577 self.git_diff.hunks_intersecting_range(range, self)
2578 }
2579
2580 pub fn git_diff_hunks_intersecting_range_rev<'a>(
2581 &'a self,
2582 range: Range<Anchor>,
2583 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2584 self.git_diff.hunks_intersecting_range_rev(range, self)
2585 }
2586
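    /// Iterates over the diagnostics intersecting the given range across all
    /// language servers, merging them by the start of their ranges.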
2587 pub fn diagnostics_in_range<'a, T, O>(
2588 &'a self,
2589 search_range: Range<T>,
2590 reversed: bool,
2591 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2592 where
2593 T: 'a + Clone + ToOffset,
2594 O: 'a + FromAnchor + Ord,
2595 {
2596 let mut iterators: Vec<_> = self
2597 .diagnostics
2598 .iter()
2599 .map(|(_, collection)| {
2600 collection
2601 .range::<T, O>(search_range.clone(), self, true, reversed)
2602 .peekable()
2603 })
2604 .collect();
2605
2606 std::iter::from_fn(move || {
2607 let (next_ix, _) = iterators
2608 .iter_mut()
2609 .enumerate()
2610 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
2611 .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
2612 iterators[next_ix].next()
2613 })
2614 }
2615
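    /// Returns the diagnostic groups for the given language server, or for all
    /// language servers when `language_server_id` is `None`, sorted by the
    /// position of each group's primary diagnostic and then by server id.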
2616 pub fn diagnostic_groups(
2617 &self,
2618 language_server_id: Option<LanguageServerId>,
2619 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
2620 let mut groups = Vec::new();
2621
2622 if let Some(language_server_id) = language_server_id {
2623 if let Ok(ix) = self
2624 .diagnostics
2625 .binary_search_by_key(&language_server_id, |e| e.0)
2626 {
2627 self.diagnostics[ix]
2628 .1
2629 .groups(language_server_id, &mut groups, self);
2630 }
2631 } else {
2632 for (language_server_id, diagnostics) in self.diagnostics.iter() {
2633 diagnostics.groups(*language_server_id, &mut groups, self);
2634 }
2635 }
2636
2637 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
2638 let a_start = &group_a.entries[group_a.primary_ix].range.start;
2639 let b_start = &group_b.entries[group_b.primary_ix].range.start;
2640 a_start.cmp(b_start, self).then_with(|| id_a.cmp(&id_b))
2641 });
2642
2643 groups
2644 }
2645
2646 pub fn diagnostic_group<'a, O>(
2647 &'a self,
2648 group_id: usize,
2649 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2650 where
2651 O: 'a + FromAnchor,
2652 {
2653 self.diagnostics
2654 .iter()
2655 .flat_map(move |(_, set)| set.group(group_id, self))
2656 }
2657
2658 pub fn diagnostics_update_count(&self) -> usize {
2659 self.diagnostics_update_count
2660 }
2661
2662 pub fn parse_count(&self) -> usize {
2663 self.parse_count
2664 }
2665
2666 pub fn selections_update_count(&self) -> usize {
2667 self.selections_update_count
2668 }
2669
2670 pub fn file(&self) -> Option<&Arc<dyn File>> {
2671 self.file.as_ref()
2672 }
2673
2674 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
2675 if let Some(file) = self.file() {
2676 if file.path().file_name().is_none() || include_root {
2677 Some(file.full_path(cx))
2678 } else {
2679 Some(file.path().to_path_buf())
2680 }
2681 } else {
2682 None
2683 }
2684 }
2685
2686 pub fn file_update_count(&self) -> usize {
2687 self.file_update_count
2688 }
2689
2690 pub fn git_diff_update_count(&self) -> usize {
2691 self.git_diff_update_count
2692 }
2693}
2694
2695fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
2696 indent_size_for_text(text.chars_at(Point::new(row, 0)))
2697}
2698
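/// Returns the indentation of the given text: the number of leading space or
/// tab characters, with the kind taken from the first such character.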
2699pub fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
2700 let mut result = IndentSize::spaces(0);
2701 for c in text {
2702 let kind = match c {
2703 ' ' => IndentKind::Space,
2704 '\t' => IndentKind::Tab,
2705 _ => break,
2706 };
2707 if result.len == 0 {
2708 result.kind = kind;
2709 }
2710 result.len += 1;
2711 }
2712 result
2713}
2714
2715impl Clone for BufferSnapshot {
2716 fn clone(&self) -> Self {
2717 Self {
2718 text: self.text.clone(),
2719 git_diff: self.git_diff.clone(),
2720 syntax: self.syntax.clone(),
2721 file: self.file.clone(),
2722 remote_selections: self.remote_selections.clone(),
2723 diagnostics: self.diagnostics.clone(),
2724 selections_update_count: self.selections_update_count,
2725 diagnostics_update_count: self.diagnostics_update_count,
2726 file_update_count: self.file_update_count,
2727 git_diff_update_count: self.git_diff_update_count,
2728 language: self.language.clone(),
2729 parse_count: self.parse_count,
2730 }
2731 }
2732}
2733
2734impl Deref for BufferSnapshot {
2735 type Target = text::BufferSnapshot;
2736
2737 fn deref(&self) -> &Self::Target {
2738 &self.text
2739 }
2740}
2741
2742unsafe impl<'a> Send for BufferChunks<'a> {}
2743
2744impl<'a> BufferChunks<'a> {
2745 pub(crate) fn new(
2746 text: &'a Rope,
2747 range: Range<usize>,
2748 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
2749 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
2750 ) -> Self {
2751 let mut highlights = None;
2752 if let Some((captures, highlight_maps)) = syntax {
2753 highlights = Some(BufferChunkHighlights {
2754 captures,
2755 next_capture: None,
2756 stack: Default::default(),
2757 highlight_maps,
2758 })
2759 }
2760
2761 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2762 let chunks = text.chunks_in_range(range.clone());
2763
2764 BufferChunks {
2765 range,
2766 chunks,
2767 diagnostic_endpoints,
2768 error_depth: 0,
2769 warning_depth: 0,
2770 information_depth: 0,
2771 hint_depth: 0,
2772 unnecessary_depth: 0,
2773 highlights,
2774 }
2775 }
2776
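    /// Repositions this iterator at the given offset, discarding highlight
    /// captures that end at or before it and re-querying the syntax captures
    /// for the remainder of the range.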
2777 pub fn seek(&mut self, offset: usize) {
2778 self.range.start = offset;
2779 self.chunks.seek(self.range.start);
2780 if let Some(highlights) = self.highlights.as_mut() {
2781 highlights
2782 .stack
2783 .retain(|(end_offset, _)| *end_offset > offset);
2784 if let Some(capture) = &highlights.next_capture {
2785 if offset >= capture.node.start_byte() {
2786 let next_capture_end = capture.node.end_byte();
2787 if offset < next_capture_end {
2788 highlights.stack.push((
2789 next_capture_end,
2790 highlights.highlight_maps[capture.grammar_index].get(capture.index),
2791 ));
2792 }
2793 highlights.next_capture.take();
2794 }
2795 }
2796 highlights.captures.set_byte_range(self.range.clone());
2797 }
2798 }
2799
2800 pub fn offset(&self) -> usize {
2801 self.range.start
2802 }
2803
2804 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2805 let depth = match endpoint.severity {
2806 DiagnosticSeverity::ERROR => &mut self.error_depth,
2807 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2808 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2809 DiagnosticSeverity::HINT => &mut self.hint_depth,
2810 _ => return,
2811 };
2812 if endpoint.is_start {
2813 *depth += 1;
2814 } else {
2815 *depth -= 1;
2816 }
2817
2818 if endpoint.is_unnecessary {
2819 if endpoint.is_start {
2820 self.unnecessary_depth += 1;
2821 } else {
2822 self.unnecessary_depth -= 1;
2823 }
2824 }
2825 }
2826
2827 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
2828 if self.error_depth > 0 {
2829 Some(DiagnosticSeverity::ERROR)
2830 } else if self.warning_depth > 0 {
2831 Some(DiagnosticSeverity::WARNING)
2832 } else if self.information_depth > 0 {
2833 Some(DiagnosticSeverity::INFORMATION)
2834 } else if self.hint_depth > 0 {
2835 Some(DiagnosticSeverity::HINT)
2836 } else {
2837 None
2838 }
2839 }
2840
2841 fn current_code_is_unnecessary(&self) -> bool {
2842 self.unnecessary_depth > 0
2843 }
2844}
2845
2846impl<'a> Iterator for BufferChunks<'a> {
2847 type Item = Chunk<'a>;
2848
2849 fn next(&mut self) -> Option<Self::Item> {
2850 let mut next_capture_start = usize::MAX;
2851 let mut next_diagnostic_endpoint = usize::MAX;
2852
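        // Pop highlight captures that have already ended, then consume captures
        // that start at or before the current offset, pushing them onto the
        // highlight stack.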
2853 if let Some(highlights) = self.highlights.as_mut() {
2854 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2855 if *parent_capture_end <= self.range.start {
2856 highlights.stack.pop();
2857 } else {
2858 break;
2859 }
2860 }
2861
2862 if highlights.next_capture.is_none() {
2863 highlights.next_capture = highlights.captures.next();
2864 }
2865
2866 while let Some(capture) = highlights.next_capture.as_ref() {
2867 if self.range.start < capture.node.start_byte() {
2868 next_capture_start = capture.node.start_byte();
2869 break;
2870 } else {
2871 let highlight_id =
2872 highlights.highlight_maps[capture.grammar_index].get(capture.index);
2873 highlights
2874 .stack
2875 .push((capture.node.end_byte(), highlight_id));
2876 highlights.next_capture = highlights.captures.next();
2877 }
2878 }
2879 }
2880
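        // Apply any diagnostic endpoints we've passed, and note the offset of
        // the next one so the emitted chunk stops there.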
2881 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
2882 if endpoint.offset <= self.range.start {
2883 self.update_diagnostic_depths(endpoint);
2884 self.diagnostic_endpoints.next();
2885 } else {
2886 next_diagnostic_endpoint = endpoint.offset;
2887 break;
2888 }
2889 }
2890
2891 if let Some(chunk) = self.chunks.peek() {
2892 let chunk_start = self.range.start;
2893 let mut chunk_end = (self.chunks.offset() + chunk.len())
2894 .min(next_capture_start)
2895 .min(next_diagnostic_endpoint);
2896 let mut highlight_id = None;
2897 if let Some(highlights) = self.highlights.as_ref() {
2898 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
2899 chunk_end = chunk_end.min(*parent_capture_end);
2900 highlight_id = Some(*parent_highlight_id);
2901 }
2902 }
2903
2904 let slice =
2905 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2906 self.range.start = chunk_end;
2907 if self.range.start == self.chunks.offset() + chunk.len() {
2908 self.chunks.next().unwrap();
2909 }
2910
2911 Some(Chunk {
2912 text: slice,
2913 syntax_highlight_id: highlight_id,
2914 diagnostic_severity: self.current_diagnostic_severity(),
2915 is_unnecessary: self.current_code_is_unnecessary(),
2916 ..Default::default()
2917 })
2918 } else {
2919 None
2920 }
2921 }
2922}
2923
2924impl operation_queue::Operation for Operation {
2925 fn lamport_timestamp(&self) -> clock::Lamport {
2926 match self {
2927 Operation::Buffer(_) => {
2928 unreachable!("buffer operations should never be deferred at this layer")
2929 }
2930 Operation::UpdateDiagnostics {
2931 lamport_timestamp, ..
2932 }
2933 | Operation::UpdateSelections {
2934 lamport_timestamp, ..
2935 }
2936 | Operation::UpdateCompletionTriggers {
2937 lamport_timestamp, ..
2938 } => *lamport_timestamp,
2939 }
2940 }
2941}
2942
2943impl Default for Diagnostic {
2944 fn default() -> Self {
2945 Self {
2946 source: Default::default(),
2947 code: None,
2948 severity: DiagnosticSeverity::ERROR,
2949 message: Default::default(),
2950 group_id: 0,
2951 is_primary: false,
2952 is_valid: true,
2953 is_disk_based: false,
2954 is_unnecessary: false,
2955 }
2956 }
2957}
2958
2959impl IndentSize {
2960 pub fn spaces(len: u32) -> Self {
2961 Self {
2962 len,
2963 kind: IndentKind::Space,
2964 }
2965 }
2966
2967 pub fn tab() -> Self {
2968 Self {
2969 len: 1,
2970 kind: IndentKind::Tab,
2971 }
2972 }
2973
2974 pub fn chars(&self) -> impl Iterator<Item = char> {
2975 iter::repeat(self.char()).take(self.len as usize)
2976 }
2977
2978 pub fn char(&self) -> char {
2979 match self.kind {
2980 IndentKind::Space => ' ',
2981 IndentKind::Tab => '\t',
2982 }
2983 }
2984
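    /// Adjusts this indent size by one step in the given direction:
    /// `Ordering::Greater` adds `size` when the kinds match (or adopts `size`
    /// if the current indent is empty), `Ordering::Less` subtracts `size` when
    /// the kinds match and the current indent is large enough, and
    /// `Ordering::Equal` leaves the indent unchanged.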
2985 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
2986 match direction {
2987 Ordering::Less => {
2988 if self.kind == size.kind && self.len >= size.len {
2989 self.len -= size.len;
2990 }
2991 }
2992 Ordering::Equal => {}
2993 Ordering::Greater => {
2994 if self.len == 0 {
2995 self = size;
2996 } else if self.kind == size.kind {
2997 self.len += size.len;
2998 }
2999 }
3000 }
3001 self
3002 }
3003}
3004
3005impl Completion {
3006 pub fn sort_key(&self) -> (usize, &str) {
3007 let kind_key = match self.lsp_completion.kind {
3008 Some(lsp::CompletionItemKind::VARIABLE) => 0,
3009 _ => 1,
3010 };
3011 (kind_key, &self.label.text[self.label.filter_range.clone()])
3012 }
3013
3014 pub fn is_snippet(&self) -> bool {
3015 self.lsp_completion.insert_text_format == Some(lsp::InsertTextFormat::SNIPPET)
3016 }
3017}
3018
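/// Groups a sequence of values into contiguous ranges, starting a new range
/// whenever a value does not directly follow the previous one or the current
/// range reaches `max_len` values.
///
/// Illustrative example (not a doctest):
///
/// ```text
/// contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100)
///   => 1..4, 5..7
/// ```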
3019pub fn contiguous_ranges(
3020 values: impl Iterator<Item = u32>,
3021 max_len: usize,
3022) -> impl Iterator<Item = Range<u32>> {
3023 let mut values = values;
3024 let mut current_range: Option<Range<u32>> = None;
3025 std::iter::from_fn(move || loop {
3026 if let Some(value) = values.next() {
3027 if let Some(range) = &mut current_range {
3028 if value == range.end && range.len() < max_len {
3029 range.end += 1;
3030 continue;
3031 }
3032 }
3033
3034 let prev_range = current_range.clone();
3035 current_range = Some(value..(value + 1));
3036 if prev_range.is_some() {
3037 return prev_range;
3038 }
3039 } else {
3040 return current_range.take();
3041 }
3042 })
3043}
3044
3045pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
3046 if c.is_whitespace() {
3047 return CharKind::Whitespace;
3048 } else if c.is_alphanumeric() || c == '_' {
3049 return CharKind::Word;
3050 }
3051
3052 if let Some(scope) = scope {
3053 if let Some(characters) = scope.word_characters() {
3054 if characters.contains(&c) {
3055 return CharKind::Word;
3056 }
3057 }
3058 }
3059
3060 CharKind::Punctuation
3061}
3062
3063/// Find all of the ranges of whitespace that occur at the ends of lines
3064/// in the given rope.
3065///
3066/// This could also be done with a regex search, but this implementation
3067/// avoids copying text.
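///
/// For example, for a rope containing `"a \nb\t\t\nc"`, the returned ranges
/// are `1..2` (the space after the `a`) and `4..6` (the tabs after the `b`).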
3068pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
3069 let mut ranges = Vec::new();
3070
3071 let mut offset = 0;
3072 let mut prev_chunk_trailing_whitespace_range = 0..0;
3073 for chunk in rope.chunks() {
3074 let mut prev_line_trailing_whitespace_range = 0..0;
3075 for (i, line) in chunk.split('\n').enumerate() {
3076 let line_end_offset = offset + line.len();
3077 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
3078 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
3079
3080 if i == 0 && trimmed_line_len == 0 {
3081 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
3082 }
3083 if !prev_line_trailing_whitespace_range.is_empty() {
3084 ranges.push(prev_line_trailing_whitespace_range);
3085 }
3086
3087 offset = line_end_offset + 1;
3088 prev_line_trailing_whitespace_range = trailing_whitespace_range;
3089 }
3090
3091 offset -= 1;
3092 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3093 }
3094
3095 if !prev_chunk_trailing_whitespace_range.is_empty() {
3096 ranges.push(prev_chunk_trailing_whitespace_range);
3097 }
3098
3099 ranges
3100}