1pub use crate::{
2 diagnostic_set::DiagnosticSet,
3 highlight_map::{HighlightId, HighlightMap},
4 proto, BracketPair, Grammar, Language, LanguageConfig, LanguageRegistry, PLAIN_TEXT,
5};
6use crate::{
7 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
8 language_settings::{language_settings, LanguageSettings},
9 outline::OutlineItem,
10 syntax_map::{
11 SyntaxLayerInfo, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatches,
12 SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 CodeLabel, LanguageScope, Outline,
15};
16use anyhow::{anyhow, Result};
17pub use clock::ReplicaId;
18use futures::FutureExt as _;
19use gpui2::{AppContext, EventEmitter, HighlightStyle, ModelContext, Task};
20use lsp2::LanguageServerId;
21use parking_lot::Mutex;
22use similar::{ChangeTag, TextDiff};
23use smallvec::SmallVec;
24use smol::future::yield_now;
25use std::{
26 any::Any,
27 cmp::{self, Ordering},
28 collections::BTreeMap,
29 ffi::OsStr,
30 future::Future,
31 iter::{self, Iterator, Peekable},
32 mem,
33 ops::{Deref, Range},
34 path::{Path, PathBuf},
35 str,
36 sync::Arc,
37 time::{Duration, Instant, SystemTime, UNIX_EPOCH},
38 vec,
39};
40use sum_tree::TreeMap;
41use text::operation_queue::OperationQueue;
42pub use text::{Buffer as TextBuffer, BufferSnapshot as TextBufferSnapshot, *};
43use theme2::SyntaxTheme;
44#[cfg(any(test, feature = "test-support"))]
45use util::RandomCharIter;
46use util::{RangeExt, TryFutureExt as _};
47
48#[cfg(any(test, feature = "test-support"))]
49pub use {tree_sitter_rust, tree_sitter_typescript};
50
51pub use lsp2::DiagnosticSeverity;
52
53pub struct Buffer {
54 text: TextBuffer,
55 diff_base: Option<String>,
56 git_diff: git::diff::BufferDiff,
57 file: Option<Arc<dyn File>>,
58 saved_version: clock::Global,
59 saved_version_fingerprint: RopeFingerprint,
60 saved_mtime: SystemTime,
61 transaction_depth: usize,
62 was_dirty_before_starting_transaction: Option<bool>,
63 language: Option<Arc<Language>>,
64 autoindent_requests: Vec<Arc<AutoindentRequest>>,
65 pending_autoindent: Option<Task<()>>,
66 sync_parse_timeout: Duration,
67 syntax_map: Mutex<SyntaxMap>,
68 parsing_in_background: bool,
69 parse_count: usize,
70 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
71 remote_selections: TreeMap<ReplicaId, SelectionSet>,
72 selections_update_count: usize,
73 diagnostics_update_count: usize,
74 diagnostics_timestamp: clock::Lamport,
75 file_update_count: usize,
76 git_diff_update_count: usize,
77 completion_triggers: Vec<String>,
78 completion_triggers_timestamp: clock::Lamport,
79 deferred_ops: OperationQueue<Operation>,
80}
81
82pub struct BufferSnapshot {
83 text: text::BufferSnapshot,
84 pub git_diff: git::diff::BufferDiff,
85 pub(crate) syntax: SyntaxSnapshot,
86 file: Option<Arc<dyn File>>,
87 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
88 diagnostics_update_count: usize,
89 file_update_count: usize,
90 git_diff_update_count: usize,
91 remote_selections: TreeMap<ReplicaId, SelectionSet>,
92 selections_update_count: usize,
93 language: Option<Arc<Language>>,
94 parse_count: usize,
95}
96
97#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
98pub struct IndentSize {
99 pub len: u32,
100 pub kind: IndentKind,
101}
102
103#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
104pub enum IndentKind {
105 #[default]
106 Space,
107 Tab,
108}
109
110#[derive(Copy, Clone, PartialEq, Eq, Debug, Default)]
111pub enum CursorShape {
112 #[default]
113 Bar,
114 Block,
115 Underscore,
116 Hollow,
117}
118
119#[derive(Clone, Debug)]
120struct SelectionSet {
121 line_mode: bool,
122 cursor_shape: CursorShape,
123 selections: Arc<[Selection<Anchor>]>,
124 lamport_timestamp: clock::Lamport,
125}
126
127#[derive(Clone, Debug, PartialEq, Eq)]
128pub struct GroupId {
129 source: Arc<str>,
130 id: usize,
131}
132
133#[derive(Clone, Debug, PartialEq, Eq)]
134pub struct Diagnostic {
135 pub source: Option<String>,
136 pub code: Option<String>,
137 pub severity: DiagnosticSeverity,
138 pub message: String,
139 pub group_id: usize,
140 pub is_valid: bool,
141 pub is_primary: bool,
142 pub is_disk_based: bool,
143 pub is_unnecessary: bool,
144}
145
146#[derive(Clone, Debug)]
147pub struct Completion {
148 pub old_range: Range<Anchor>,
149 pub new_text: String,
150 pub label: CodeLabel,
151 pub server_id: LanguageServerId,
152 pub lsp_completion: lsp2::CompletionItem,
153}
154
155#[derive(Clone, Debug)]
156pub struct CodeAction {
157 pub server_id: LanguageServerId,
158 pub range: Range<Anchor>,
159 pub lsp_action: lsp2::CodeAction,
160}
161
162#[derive(Clone, Debug, PartialEq, Eq)]
163pub enum Operation {
164 Buffer(text::Operation),
165
166 UpdateDiagnostics {
167 server_id: LanguageServerId,
168 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
169 lamport_timestamp: clock::Lamport,
170 },
171
172 UpdateSelections {
173 selections: Arc<[Selection<Anchor>]>,
174 lamport_timestamp: clock::Lamport,
175 line_mode: bool,
176 cursor_shape: CursorShape,
177 },
178
179 UpdateCompletionTriggers {
180 triggers: Vec<String>,
181 lamport_timestamp: clock::Lamport,
182 },
183}
184
185#[derive(Clone, Debug, PartialEq, Eq)]
186pub enum Event {
187 Operation(Operation),
188 Edited,
189 DirtyChanged,
190 Saved,
191 FileHandleChanged,
192 Reloaded,
193 DiffBaseChanged,
194 LanguageChanged,
195 Reparsed,
196 DiagnosticsUpdated,
197 Closed,
198}
199
200pub trait File: Send + Sync {
201 fn as_local(&self) -> Option<&dyn LocalFile>;
202
203 fn is_local(&self) -> bool {
204 self.as_local().is_some()
205 }
206
207 fn mtime(&self) -> SystemTime;
208
209 /// Returns the path of this file relative to the worktree's root directory.
210 fn path(&self) -> &Arc<Path>;
211
/// Returns the path of this file relative to the worktree's parent directory,
/// which means it includes the name of the worktree's root folder.
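///
/// (For illustration, with a hypothetical worktree rooted at `my-project`: a file at
/// `src/lib.rs` has `path() == "src/lib.rs"`, while `full_path()` yields
/// `my-project/src/lib.rs`.)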
214 fn full_path(&self, cx: &AppContext) -> PathBuf;
215
216 /// Returns the last component of this handle's absolute path. If this handle refers to the root
217 /// of its worktree, then this method will return the name of the worktree itself.
218 fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr;
219
220 /// Returns the id of the worktree to which this file belongs.
221 ///
222 /// This is needed for looking up project-specific settings.
223 fn worktree_id(&self) -> usize;
224
225 fn is_deleted(&self) -> bool;
226
227 fn as_any(&self) -> &dyn Any;
228
229 fn to_proto(&self) -> rpc::proto::File;
230}
231
232pub trait LocalFile: File {
233 /// Returns the absolute path of this file.
234 fn abs_path(&self, cx: &AppContext) -> PathBuf;
235
236 fn load(&self, cx: &AppContext) -> Task<Result<String>>;
237
238 fn buffer_reloaded(
239 &self,
240 buffer_id: u64,
241 version: &clock::Global,
242 fingerprint: RopeFingerprint,
243 line_ending: LineEnding,
244 mtime: SystemTime,
245 cx: &mut AppContext,
246 );
247}
248
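/// Determines how inserted text should be auto-indented.
///
/// A minimal usage sketch (ranges and text are hypothetical, shown only to
/// illustrate how the two modes are passed to [`Buffer::edit`]):
///
/// ```ignore
/// // Re-indent every inserted line based on the surrounding syntax.
/// buffer.edit([(point..point, "if x {\n    y\n}")], Some(AutoindentMode::EachLine), cx);
///
/// // Shift a pasted block as a unit, preserving its internal relative indentation.
/// buffer.edit(
///     [(point..point, copied_text)],
///     Some(AutoindentMode::Block { original_indent_columns: vec![2] }),
///     cx,
/// );
/// ```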
249#[derive(Clone, Debug)]
250pub enum AutoindentMode {
251 /// Indent each line of inserted text.
252 EachLine,
253 /// Apply the same indentation adjustment to all of the lines
254 /// in a given insertion.
255 Block {
256 /// The original indentation level of the first line of each
257 /// insertion, if it has been copied.
258 original_indent_columns: Vec<u32>,
259 },
260}
261
262#[derive(Clone)]
263struct AutoindentRequest {
264 before_edit: BufferSnapshot,
265 entries: Vec<AutoindentRequestEntry>,
266 is_block_mode: bool,
267}
268
269#[derive(Clone)]
270struct AutoindentRequestEntry {
271 /// A range of the buffer whose indentation should be adjusted.
272 range: Range<Anchor>,
273 /// Whether or not these lines should be considered brand new, for the
274 /// purpose of auto-indent. When text is not new, its indentation will
275 /// only be adjusted if the suggested indentation level has *changed*
276 /// since the edit was made.
277 first_line_is_new: bool,
278 indent_size: IndentSize,
279 original_indent_column: Option<u32>,
280}
281
282#[derive(Debug)]
283struct IndentSuggestion {
284 basis_row: u32,
285 delta: Ordering,
286 within_error: bool,
287}
288
289struct BufferChunkHighlights<'a> {
290 captures: SyntaxMapCaptures<'a>,
291 next_capture: Option<SyntaxMapCapture<'a>>,
292 stack: Vec<(usize, HighlightId)>,
293 highlight_maps: Vec<HighlightMap>,
294}
295
296pub struct BufferChunks<'a> {
297 range: Range<usize>,
298 chunks: text::Chunks<'a>,
299 diagnostic_endpoints: Peekable<vec::IntoIter<DiagnosticEndpoint>>,
300 error_depth: usize,
301 warning_depth: usize,
302 information_depth: usize,
303 hint_depth: usize,
304 unnecessary_depth: usize,
305 highlights: Option<BufferChunkHighlights<'a>>,
306}
307
308#[derive(Clone, Copy, Debug, Default)]
309pub struct Chunk<'a> {
310 pub text: &'a str,
311 pub syntax_highlight_id: Option<HighlightId>,
312 pub highlight_style: Option<HighlightStyle>,
313 pub diagnostic_severity: Option<DiagnosticSeverity>,
314 pub is_unnecessary: bool,
315 pub is_tab: bool,
316}
317
318pub struct Diff {
319 pub(crate) base_version: clock::Global,
320 line_ending: LineEnding,
321 edits: Vec<(Range<usize>, Arc<str>)>,
322}
323
324#[derive(Clone, Copy)]
325pub(crate) struct DiagnosticEndpoint {
326 offset: usize,
327 is_start: bool,
328 severity: DiagnosticSeverity,
329 is_unnecessary: bool,
330}
331
332#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
333pub enum CharKind {
334 Punctuation,
335 Whitespace,
336 Word,
337}
338
339impl CharKind {
340 pub fn coerce_punctuation(self, treat_punctuation_as_word: bool) -> Self {
341 if treat_punctuation_as_word && self == CharKind::Punctuation {
342 CharKind::Word
343 } else {
344 self
345 }
346 }
347}
348
349impl Buffer {
350 pub fn new<T: Into<String>>(replica_id: ReplicaId, id: u64, base_text: T) -> Self {
351 Self::build(
352 TextBuffer::new(replica_id, id, base_text.into()),
353 None,
354 None,
355 )
356 }
357
358 pub fn remote(remote_id: u64, replica_id: ReplicaId, base_text: String) -> Self {
359 Self::build(
360 TextBuffer::new(replica_id, remote_id, base_text),
361 None,
362 None,
363 )
364 }
365
366 pub fn from_proto(
367 replica_id: ReplicaId,
368 message: proto::BufferState,
369 file: Option<Arc<dyn File>>,
370 ) -> Result<Self> {
371 let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
372 let mut this = Self::build(
373 buffer,
374 message.diff_base.map(|text| text.into_boxed_str().into()),
375 file,
376 );
377 this.text.set_line_ending(proto::deserialize_line_ending(
378 rpc::proto::LineEnding::from_i32(message.line_ending)
379 .ok_or_else(|| anyhow!("missing line_ending"))?,
380 ));
381 this.saved_version = proto::deserialize_version(&message.saved_version);
382 this.saved_version_fingerprint =
383 proto::deserialize_fingerprint(&message.saved_version_fingerprint)?;
384 this.saved_mtime = message
385 .saved_mtime
386 .ok_or_else(|| anyhow!("invalid saved_mtime"))?
387 .into();
388 Ok(this)
389 }
390
391 pub fn to_proto(&self) -> proto::BufferState {
392 proto::BufferState {
393 id: self.remote_id(),
394 file: self.file.as_ref().map(|f| f.to_proto()),
395 base_text: self.base_text().to_string(),
396 diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
397 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
398 saved_version: proto::serialize_version(&self.saved_version),
399 saved_version_fingerprint: proto::serialize_fingerprint(self.saved_version_fingerprint),
400 saved_mtime: Some(self.saved_mtime.into()),
401 }
402 }
403
404 pub fn serialize_ops(
405 &self,
406 since: Option<clock::Global>,
407 cx: &AppContext,
408 ) -> Task<Vec<proto::Operation>> {
409 let mut operations = Vec::new();
410 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
411
412 operations.extend(self.remote_selections.iter().map(|(_, set)| {
413 proto::serialize_operation(&Operation::UpdateSelections {
414 selections: set.selections.clone(),
415 lamport_timestamp: set.lamport_timestamp,
416 line_mode: set.line_mode,
417 cursor_shape: set.cursor_shape,
418 })
419 }));
420
421 for (server_id, diagnostics) in &self.diagnostics {
422 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
423 lamport_timestamp: self.diagnostics_timestamp,
424 server_id: *server_id,
425 diagnostics: diagnostics.iter().cloned().collect(),
426 }));
427 }
428
429 operations.push(proto::serialize_operation(
430 &Operation::UpdateCompletionTriggers {
431 triggers: self.completion_triggers.clone(),
432 lamport_timestamp: self.completion_triggers_timestamp,
433 },
434 ));
435
436 let text_operations = self.text.operations().clone();
437 cx.spawn(|_| async move {
438 let since = since.unwrap_or_default();
439 operations.extend(
440 text_operations
441 .iter()
442 .filter(|(_, op)| !since.observed(op.timestamp()))
443 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
444 );
445 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
446 operations
447 })
448 }
449
450 pub fn with_language(mut self, language: Arc<Language>, cx: &mut ModelContext<Self>) -> Self {
451 self.set_language(Some(language), cx);
452 self
453 }
454
455 pub fn build(
456 buffer: TextBuffer,
457 diff_base: Option<String>,
458 file: Option<Arc<dyn File>>,
459 ) -> Self {
460 let saved_mtime = if let Some(file) = file.as_ref() {
461 file.mtime()
462 } else {
463 UNIX_EPOCH
464 };
465
466 Self {
467 saved_mtime,
468 saved_version: buffer.version(),
469 saved_version_fingerprint: buffer.as_rope().fingerprint(),
470 transaction_depth: 0,
471 was_dirty_before_starting_transaction: None,
472 text: buffer,
473 diff_base,
474 git_diff: git::diff::BufferDiff::new(),
475 file,
476 syntax_map: Mutex::new(SyntaxMap::new()),
477 parsing_in_background: false,
478 parse_count: 0,
479 sync_parse_timeout: Duration::from_millis(1),
480 autoindent_requests: Default::default(),
481 pending_autoindent: Default::default(),
482 language: None,
483 remote_selections: Default::default(),
484 selections_update_count: 0,
485 diagnostics: Default::default(),
486 diagnostics_update_count: 0,
487 diagnostics_timestamp: Default::default(),
488 file_update_count: 0,
489 git_diff_update_count: 0,
490 completion_triggers: Default::default(),
491 completion_triggers_timestamp: Default::default(),
492 deferred_ops: OperationQueue::new(),
493 }
494 }
495
496 pub fn snapshot(&self) -> BufferSnapshot {
497 let text = self.text.snapshot();
498 let mut syntax_map = self.syntax_map.lock();
499 syntax_map.interpolate(&text);
500 let syntax = syntax_map.snapshot();
501
502 BufferSnapshot {
503 text,
504 syntax,
505 git_diff: self.git_diff.clone(),
506 file: self.file.clone(),
507 remote_selections: self.remote_selections.clone(),
508 diagnostics: self.diagnostics.clone(),
509 diagnostics_update_count: self.diagnostics_update_count,
510 file_update_count: self.file_update_count,
511 git_diff_update_count: self.git_diff_update_count,
512 language: self.language.clone(),
513 parse_count: self.parse_count,
514 selections_update_count: self.selections_update_count,
515 }
516 }
517
518 pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
519 &self.text
520 }
521
522 pub fn text_snapshot(&self) -> text::BufferSnapshot {
523 self.text.snapshot()
524 }
525
526 pub fn file(&self) -> Option<&Arc<dyn File>> {
527 self.file.as_ref()
528 }
529
530 pub fn saved_version(&self) -> &clock::Global {
531 &self.saved_version
532 }
533
534 pub fn saved_version_fingerprint(&self) -> RopeFingerprint {
535 self.saved_version_fingerprint
536 }
537
538 pub fn saved_mtime(&self) -> SystemTime {
539 self.saved_mtime
540 }
541
542 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut ModelContext<Self>) {
543 self.syntax_map.lock().clear();
544 self.language = language;
545 self.reparse(cx);
546 cx.emit(Event::LanguageChanged);
547 }
548
549 pub fn set_language_registry(&mut self, language_registry: Arc<LanguageRegistry>) {
550 self.syntax_map
551 .lock()
552 .set_language_registry(language_registry);
553 }
554
555 pub fn did_save(
556 &mut self,
557 version: clock::Global,
558 fingerprint: RopeFingerprint,
559 mtime: SystemTime,
560 cx: &mut ModelContext<Self>,
561 ) {
562 self.saved_version = version;
563 self.saved_version_fingerprint = fingerprint;
564 self.saved_mtime = mtime;
565 cx.emit(Event::Saved);
566 cx.notify();
567 }
568
569 pub fn reload(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<Option<Transaction>>> {
570 cx.spawn(|this, mut cx| async move {
571 if let Some((new_mtime, new_text)) = this.update(&mut cx, |this, cx| {
572 let file = this.file.as_ref()?.as_local()?;
573 Some((file.mtime(), file.load(cx)))
574 })? {
575 let new_text = new_text.await?;
576 let diff = this
577 .update(&mut cx, |this, cx| this.diff(new_text, cx))?
578 .await;
579 this.update(&mut cx, |this, cx| {
580 if this.version() == diff.base_version {
581 this.finalize_last_transaction();
582 this.apply_diff(diff, cx);
583 if let Some(transaction) = this.finalize_last_transaction().cloned() {
584 this.did_reload(
585 this.version(),
586 this.as_rope().fingerprint(),
587 this.line_ending(),
588 new_mtime,
589 cx,
590 );
591 return Some(transaction);
592 }
593 }
594 None
595 })
596 } else {
597 Ok(None)
598 }
599 })
600 }
601
602 pub fn did_reload(
603 &mut self,
604 version: clock::Global,
605 fingerprint: RopeFingerprint,
606 line_ending: LineEnding,
607 mtime: SystemTime,
608 cx: &mut ModelContext<Self>,
609 ) {
610 self.saved_version = version;
611 self.saved_version_fingerprint = fingerprint;
612 self.text.set_line_ending(line_ending);
613 self.saved_mtime = mtime;
614 if let Some(file) = self.file.as_ref().and_then(|f| f.as_local()) {
615 file.buffer_reloaded(
616 self.remote_id(),
617 &self.saved_version,
618 self.saved_version_fingerprint,
619 self.line_ending(),
620 self.saved_mtime,
621 cx,
622 );
623 }
624 cx.emit(Event::Reloaded);
625 cx.notify();
626 }
627
628 pub fn file_updated(
629 &mut self,
630 new_file: Arc<dyn File>,
631 cx: &mut ModelContext<Self>,
632 ) -> Task<()> {
633 let mut file_changed = false;
634 let mut task = Task::ready(());
635
636 if let Some(old_file) = self.file.as_ref() {
637 if new_file.path() != old_file.path() {
638 file_changed = true;
639 }
640
641 if new_file.is_deleted() {
642 if !old_file.is_deleted() {
643 file_changed = true;
644 if !self.is_dirty() {
645 cx.emit(Event::DirtyChanged);
646 }
647 }
648 } else {
649 let new_mtime = new_file.mtime();
650 if new_mtime != old_file.mtime() {
651 file_changed = true;
652
653 if !self.is_dirty() {
654 let reload = self.reload(cx).log_err().map(drop);
655 task = cx.executor().spawn(reload);
656 }
657 }
658 }
659 } else {
660 file_changed = true;
661 };
662
663 self.file = Some(new_file);
664 if file_changed {
665 self.file_update_count += 1;
666 cx.emit(Event::FileHandleChanged);
667 cx.notify();
668 }
669 task
670 }
671
672 pub fn diff_base(&self) -> Option<&str> {
673 self.diff_base.as_deref()
674 }
675
676 pub fn set_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
677 self.diff_base = diff_base;
678 self.git_diff_recalc(cx);
679 cx.emit(Event::DiffBaseChanged);
680 }
681
682 pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) -> Option<Task<()>> {
683 let diff_base = self.diff_base.clone()?; // TODO: Make this an Arc
684 let snapshot = self.snapshot();
685
686 let mut diff = self.git_diff.clone();
687 let diff = cx.executor().spawn(async move {
688 diff.update(&diff_base, &snapshot).await;
689 diff
690 });
691
692 Some(cx.spawn(|this, mut cx| async move {
693 let buffer_diff = diff.await;
694 this.update(&mut cx, |this, _| {
695 this.git_diff = buffer_diff;
696 this.git_diff_update_count += 1;
697 })
698 .ok();
699 }))
700 }
701
702 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
703 cx.emit(Event::Closed);
704 }
705
706 pub fn language(&self) -> Option<&Arc<Language>> {
707 self.language.as_ref()
708 }
709
710 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
711 let offset = position.to_offset(self);
712 self.syntax_map
713 .lock()
714 .layers_for_range(offset..offset, &self.text)
715 .last()
716 .map(|info| info.language.clone())
717 .or_else(|| self.language.clone())
718 }
719
720 pub fn parse_count(&self) -> usize {
721 self.parse_count
722 }
723
724 pub fn selections_update_count(&self) -> usize {
725 self.selections_update_count
726 }
727
728 pub fn diagnostics_update_count(&self) -> usize {
729 self.diagnostics_update_count
730 }
731
732 pub fn file_update_count(&self) -> usize {
733 self.file_update_count
734 }
735
736 pub fn git_diff_update_count(&self) -> usize {
737 self.git_diff_update_count
738 }
739
740 #[cfg(any(test, feature = "test-support"))]
741 pub fn is_parsing(&self) -> bool {
742 self.parsing_in_background
743 }
744
745 pub fn contains_unknown_injections(&self) -> bool {
746 self.syntax_map.lock().contains_unknown_injections()
747 }
748
749 #[cfg(test)]
750 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
751 self.sync_parse_timeout = timeout;
752 }
753
754 /// Called after an edit to synchronize the buffer's main parse tree with
755 /// the buffer's new underlying state.
756 ///
757 /// Locks the syntax map and interpolates the edits since the last reparse
758 /// into the foreground syntax tree.
759 ///
760 /// Then takes a stable snapshot of the syntax map before unlocking it.
761 /// The snapshot with the interpolated edits is sent to a background thread,
762 /// where we ask Tree-sitter to perform an incremental parse.
763 ///
/// Meanwhile, in the foreground, we block the main thread for up to
/// `sync_parse_timeout` (1ms by default) waiting for the parse to complete,
/// and proceed synchronously if it finishes in time.
///
/// If we time out waiting for the parse, we return with the interpolated
/// tree still in the foreground and spawn a second task that waits for the
/// background parse to complete. When it does, that task calls back into
/// the main thread and installs the new parse state.
///
/// If the buffer or grammar changed while the background parse was running,
/// we initiate an additional reparse. To avoid concurrent parses for the
/// same buffer, a new parse is only started when we are not already parsing
/// in the background.
777 pub fn reparse(&mut self, cx: &mut ModelContext<Self>) {
778 if self.parsing_in_background {
779 return;
780 }
781 let language = if let Some(language) = self.language.clone() {
782 language
783 } else {
784 return;
785 };
786
787 let text = self.text_snapshot();
788 let parsed_version = self.version();
789
790 let mut syntax_map = self.syntax_map.lock();
791 syntax_map.interpolate(&text);
792 let language_registry = syntax_map.language_registry();
793 let mut syntax_snapshot = syntax_map.snapshot();
794 drop(syntax_map);
795
796 let parse_task = cx.executor().spawn({
797 let language = language.clone();
798 let language_registry = language_registry.clone();
799 async move {
800 syntax_snapshot.reparse(&text, language_registry, language);
801 syntax_snapshot
802 }
803 });
804
805 match cx
806 .executor()
807 .block_with_timeout(self.sync_parse_timeout, parse_task)
808 {
809 Ok(new_syntax_snapshot) => {
810 self.did_finish_parsing(new_syntax_snapshot, cx);
811 return;
812 }
813 Err(parse_task) => {
814 self.parsing_in_background = true;
815 cx.spawn(move |this, mut cx| async move {
816 let new_syntax_map = parse_task.await;
817 this.update(&mut cx, move |this, cx| {
818 let grammar_changed =
819 this.language.as_ref().map_or(true, |current_language| {
820 !Arc::ptr_eq(&language, current_language)
821 });
822 let language_registry_changed = new_syntax_map
823 .contains_unknown_injections()
824 && language_registry.map_or(false, |registry| {
825 registry.version() != new_syntax_map.language_registry_version()
826 });
827 let parse_again = language_registry_changed
828 || grammar_changed
829 || this.version.changed_since(&parsed_version);
830 this.did_finish_parsing(new_syntax_map, cx);
831 this.parsing_in_background = false;
832 if parse_again {
833 this.reparse(cx);
834 }
835 });
836 })
837 .detach();
838 }
839 }
840 }
841
842 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut ModelContext<Self>) {
843 self.parse_count += 1;
844 self.syntax_map.lock().did_parse(syntax_snapshot);
845 self.request_autoindent(cx);
846 cx.emit(Event::Reparsed);
847 cx.notify();
848 }
849
850 pub fn update_diagnostics(
851 &mut self,
852 server_id: LanguageServerId,
853 diagnostics: DiagnosticSet,
854 cx: &mut ModelContext<Self>,
855 ) {
856 let lamport_timestamp = self.text.lamport_clock.tick();
857 let op = Operation::UpdateDiagnostics {
858 server_id,
859 diagnostics: diagnostics.iter().cloned().collect(),
860 lamport_timestamp,
861 };
862 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
863 self.send_operation(op, cx);
864 }
865
866 fn request_autoindent(&mut self, cx: &mut ModelContext<Self>) {
867 if let Some(indent_sizes) = self.compute_autoindents() {
868 let indent_sizes = cx.executor().spawn(indent_sizes);
869 match cx
870 .executor()
871 .block_with_timeout(Duration::from_micros(500), indent_sizes)
872 {
873 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
874 Err(indent_sizes) => {
875 self.pending_autoindent = Some(cx.spawn(|this, mut cx| async move {
876 let indent_sizes = indent_sizes.await;
877 this.update(&mut cx, |this, cx| {
878 this.apply_autoindents(indent_sizes, cx);
879 });
880 }));
881 }
882 }
883 } else {
884 self.autoindent_requests.clear();
885 }
886 }
887
888 fn compute_autoindents(&self) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>>> {
889 let max_rows_between_yields = 100;
890 let snapshot = self.snapshot();
891 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
892 return None;
893 }
894
895 let autoindent_requests = self.autoindent_requests.clone();
896 Some(async move {
897 let mut indent_sizes = BTreeMap::new();
898 for request in autoindent_requests {
899 // Resolve each edited range to its row in the current buffer and in the
900 // buffer before this batch of edits.
901 let mut row_ranges = Vec::new();
902 let mut old_to_new_rows = BTreeMap::new();
903 let mut language_indent_sizes_by_new_row = Vec::new();
904 for entry in &request.entries {
905 let position = entry.range.start;
906 let new_row = position.to_point(&snapshot).row;
907 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
908 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
909
910 if !entry.first_line_is_new {
911 let old_row = position.to_point(&request.before_edit).row;
912 old_to_new_rows.insert(old_row, new_row);
913 }
914 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
915 }
916
917 // Build a map containing the suggested indentation for each of the edited lines
918 // with respect to the state of the buffer before these edits. This map is keyed
919 // by the rows for these lines in the current state of the buffer.
920 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
921 let old_edited_ranges =
922 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
923 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
924 let mut language_indent_size = IndentSize::default();
925 for old_edited_range in old_edited_ranges {
926 let suggestions = request
927 .before_edit
928 .suggest_autoindents(old_edited_range.clone())
929 .into_iter()
930 .flatten();
931 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
932 if let Some(suggestion) = suggestion {
933 let new_row = *old_to_new_rows.get(&old_row).unwrap();
934
935 // Find the indent size based on the language for this row.
936 while let Some((row, size)) = language_indent_sizes.peek() {
937 if *row > new_row {
938 break;
939 }
940 language_indent_size = *size;
941 language_indent_sizes.next();
942 }
943
944 let suggested_indent = old_to_new_rows
945 .get(&suggestion.basis_row)
946 .and_then(|from_row| {
947 Some(old_suggestions.get(from_row).copied()?.0)
948 })
949 .unwrap_or_else(|| {
950 request
951 .before_edit
952 .indent_size_for_line(suggestion.basis_row)
953 })
954 .with_delta(suggestion.delta, language_indent_size);
955 old_suggestions
956 .insert(new_row, (suggested_indent, suggestion.within_error));
957 }
958 }
959 yield_now().await;
960 }
961
962 // In block mode, only compute indentation suggestions for the first line
963 // of each insertion. Otherwise, compute suggestions for every inserted line.
964 let new_edited_row_ranges = contiguous_ranges(
965 row_ranges.iter().flat_map(|(range, _)| {
966 if request.is_block_mode {
967 range.start..range.start + 1
968 } else {
969 range.clone()
970 }
971 }),
972 max_rows_between_yields,
973 );
974
975 // Compute new suggestions for each line, but only include them in the result
976 // if they differ from the old suggestion for that line.
977 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
978 let mut language_indent_size = IndentSize::default();
979 for new_edited_row_range in new_edited_row_ranges {
980 let suggestions = snapshot
981 .suggest_autoindents(new_edited_row_range.clone())
982 .into_iter()
983 .flatten();
984 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
985 if let Some(suggestion) = suggestion {
986 // Find the indent size based on the language for this row.
987 while let Some((row, size)) = language_indent_sizes.peek() {
988 if *row > new_row {
989 break;
990 }
991 language_indent_size = *size;
992 language_indent_sizes.next();
993 }
994
995 let suggested_indent = indent_sizes
996 .get(&suggestion.basis_row)
997 .copied()
998 .unwrap_or_else(|| {
999 snapshot.indent_size_for_line(suggestion.basis_row)
1000 })
1001 .with_delta(suggestion.delta, language_indent_size);
1002 if old_suggestions.get(&new_row).map_or(
1003 true,
1004 |(old_indentation, was_within_error)| {
1005 suggested_indent != *old_indentation
1006 && (!suggestion.within_error || *was_within_error)
1007 },
1008 ) {
1009 indent_sizes.insert(new_row, suggested_indent);
1010 }
1011 }
1012 }
1013 yield_now().await;
1014 }
1015
1016 // For each block of inserted text, adjust the indentation of the remaining
1017 // lines of the block by the same amount as the first line was adjusted.
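// (For instance, if the first line of a three-line insertion was originally at
// column 2 and ends up indented to column 6, the delta of +4 columns is also
// applied to the remaining two lines of that insertion.)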
1018 if request.is_block_mode {
1019 for (row_range, original_indent_column) in
1020 row_ranges
1021 .into_iter()
1022 .filter_map(|(range, original_indent_column)| {
1023 if range.len() > 1 {
1024 Some((range, original_indent_column?))
1025 } else {
1026 None
1027 }
1028 })
1029 {
1030 let new_indent = indent_sizes
1031 .get(&row_range.start)
1032 .copied()
1033 .unwrap_or_else(|| snapshot.indent_size_for_line(row_range.start));
1034 let delta = new_indent.len as i64 - original_indent_column as i64;
1035 if delta != 0 {
1036 for row in row_range.skip(1) {
1037 indent_sizes.entry(row).or_insert_with(|| {
1038 let mut size = snapshot.indent_size_for_line(row);
1039 if size.kind == new_indent.kind {
1040 match delta.cmp(&0) {
1041 Ordering::Greater => size.len += delta as u32,
1042 Ordering::Less => {
1043 size.len = size.len.saturating_sub(-delta as u32)
1044 }
1045 Ordering::Equal => {}
1046 }
1047 }
1048 size
1049 });
1050 }
1051 }
1052 }
1053 }
1054 }
1055
1056 indent_sizes
1057 })
1058 }
1059
1060 fn apply_autoindents(
1061 &mut self,
1062 indent_sizes: BTreeMap<u32, IndentSize>,
1063 cx: &mut ModelContext<Self>,
1064 ) {
1065 self.autoindent_requests.clear();
1066
1067 let edits: Vec<_> = indent_sizes
1068 .into_iter()
1069 .filter_map(|(row, indent_size)| {
1070 let current_size = indent_size_for_line(self, row);
1071 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1072 })
1073 .collect();
1074
1075 self.edit(edits, None, cx);
1076 }
1077
// Create a minimal edit that will cause the given row to be indented
// with the given size. After applying this edit, the length of the line
// will always be at least `new_size.len`.
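//
// For example (illustrative values): growing a 2-space indent to 4 spaces on
// row 3 produces the insertion `(Point::new(3, 0)..Point::new(3, 0), "  ")`,
// while shrinking from 4 spaces to 2 produces the deletion
// `(Point::new(3, 0)..Point::new(3, 2), "")`.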
1081 pub fn edit_for_indent_size_adjustment(
1082 row: u32,
1083 current_size: IndentSize,
1084 new_size: IndentSize,
1085 ) -> Option<(Range<Point>, String)> {
1086 if new_size.kind != current_size.kind {
1087 Some((
1088 Point::new(row, 0)..Point::new(row, current_size.len),
1089 iter::repeat(new_size.char())
1090 .take(new_size.len as usize)
1091 .collect::<String>(),
1092 ))
1093 } else {
match new_size.len.cmp(&current_size.len) {
1095 Ordering::Greater => {
1096 let point = Point::new(row, 0);
1097 Some((
1098 point..point,
1099 iter::repeat(new_size.char())
1100 .take((new_size.len - current_size.len) as usize)
1101 .collect::<String>(),
1102 ))
1103 }
1104
1105 Ordering::Less => Some((
1106 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1107 String::new(),
1108 )),
1109
1110 Ordering::Equal => None,
1111 }
1112 }
1113 }
1114
1115 pub fn diff(&self, mut new_text: String, cx: &AppContext) -> Task<Diff> {
1116 let old_text = self.as_rope().clone();
1117 let base_version = self.version();
1118 cx.executor().spawn(async move {
1119 let old_text = old_text.to_string();
1120 let line_ending = LineEnding::detect(&new_text);
1121 LineEnding::normalize(&mut new_text);
1122 let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
1123 let mut edits = Vec::new();
1124 let mut offset = 0;
1125 let empty: Arc<str> = "".into();
1126 for change in diff.iter_all_changes() {
1127 let value = change.value();
1128 let end_offset = offset + value.len();
1129 match change.tag() {
1130 ChangeTag::Equal => {
1131 offset = end_offset;
1132 }
1133 ChangeTag::Delete => {
1134 edits.push((offset..end_offset, empty.clone()));
1135 offset = end_offset;
1136 }
1137 ChangeTag::Insert => {
1138 edits.push((offset..offset, value.into()));
1139 }
1140 }
1141 }
1142 Diff {
1143 base_version,
1144 line_ending,
1145 edits,
1146 }
1147 })
1148 }
1149
/// Spawn a background task that searches the buffer for any whitespace
/// at the ends of lines, and returns a `Diff` that removes that whitespace.
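///
/// A sketch of the intended flow, mirroring the pattern used in `reload` above
/// (the surrounding task setup is assumed, not shown):
///
/// ```ignore
/// let diff = this
///     .update(&mut cx, |this, cx| this.remove_trailing_whitespace(cx))?
///     .await;
/// this.update(&mut cx, |this, cx| this.apply_diff(diff, cx))?;
/// ```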
1152 pub fn remove_trailing_whitespace(&self, cx: &AppContext) -> Task<Diff> {
1153 let old_text = self.as_rope().clone();
1154 let line_ending = self.line_ending();
1155 let base_version = self.version();
1156 cx.executor().spawn(async move {
1157 let ranges = trailing_whitespace_ranges(&old_text);
1158 let empty = Arc::<str>::from("");
1159 Diff {
1160 base_version,
1161 line_ending,
1162 edits: ranges
1163 .into_iter()
1164 .map(|range| (range, empty.clone()))
1165 .collect(),
1166 }
1167 })
1168 }
1169
1170 /// Ensure that the buffer ends with a single newline character, and
1171 /// no other whitespace.
1172 pub fn ensure_final_newline(&mut self, cx: &mut ModelContext<Self>) {
1173 let len = self.len();
1174 let mut offset = len;
1175 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1176 let non_whitespace_len = chunk
1177 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1178 .len();
1179 offset -= chunk.len();
1180 offset += non_whitespace_len;
1181 if non_whitespace_len != 0 {
1182 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1183 return;
1184 }
1185 break;
1186 }
1187 }
1188 self.edit([(offset..len, "\n")], None, cx);
1189 }
1190
1191 /// Apply a diff to the buffer. If the buffer has changed since the given diff was
1192 /// calculated, then adjust the diff to account for those changes, and discard any
1193 /// parts of the diff that conflict with those changes.
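///
/// (For example, a hunk is shifted by the net length change of any edits that
/// ended before it, and dropped entirely if an intervening edit overlaps it.)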
1194 pub fn apply_diff(&mut self, diff: Diff, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1195 // Check for any edits to the buffer that have occurred since this diff
1196 // was computed.
1197 let snapshot = self.snapshot();
1198 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1199 let mut delta = 0;
1200 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1201 while let Some(edit_since) = edits_since.peek() {
1202 // If the edit occurs after a diff hunk, then it does not
1203 // affect that hunk.
1204 if edit_since.old.start > range.end {
1205 break;
1206 }
1207 // If the edit precedes the diff hunk, then adjust the hunk
1208 // to reflect the edit.
1209 else if edit_since.old.end < range.start {
1210 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1211 edits_since.next();
1212 }
1213 // If the edit intersects a diff hunk, then discard that hunk.
1214 else {
1215 return None;
1216 }
1217 }
1218
1219 let start = (range.start as i64 + delta) as usize;
1220 let end = (range.end as i64 + delta) as usize;
1221 Some((start..end, new_text))
1222 });
1223
1224 self.start_transaction();
1225 self.text.set_line_ending(diff.line_ending);
1226 self.edit(adjusted_edits, None, cx);
1227 self.end_transaction(cx)
1228 }
1229
1230 pub fn is_dirty(&self) -> bool {
1231 self.saved_version_fingerprint != self.as_rope().fingerprint()
1232 || self.file.as_ref().map_or(false, |file| file.is_deleted())
1233 }
1234
1235 pub fn has_conflict(&self) -> bool {
1236 self.saved_version_fingerprint != self.as_rope().fingerprint()
1237 && self
1238 .file
1239 .as_ref()
1240 .map_or(false, |file| file.mtime() > self.saved_mtime)
1241 }
1242
1243 pub fn subscribe(&mut self) -> Subscription {
1244 self.text.subscribe()
1245 }
1246
1247 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1248 self.start_transaction_at(Instant::now())
1249 }
1250
1251 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1252 self.transaction_depth += 1;
1253 if self.was_dirty_before_starting_transaction.is_none() {
1254 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1255 }
1256 self.text.start_transaction_at(now)
1257 }
1258
1259 pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1260 self.end_transaction_at(Instant::now(), cx)
1261 }
1262
1263 pub fn end_transaction_at(
1264 &mut self,
1265 now: Instant,
1266 cx: &mut ModelContext<Self>,
1267 ) -> Option<TransactionId> {
1268 assert!(self.transaction_depth > 0);
1269 self.transaction_depth -= 1;
1270 let was_dirty = if self.transaction_depth == 0 {
1271 self.was_dirty_before_starting_transaction.take().unwrap()
1272 } else {
1273 false
1274 };
1275 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
1276 self.did_edit(&start_version, was_dirty, cx);
1277 Some(transaction_id)
1278 } else {
1279 None
1280 }
1281 }
1282
1283 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
1284 self.text.push_transaction(transaction, now);
1285 }
1286
1287 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
1288 self.text.finalize_last_transaction()
1289 }
1290
1291 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
1292 self.text.group_until_transaction(transaction_id);
1293 }
1294
1295 pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
1296 self.text.forget_transaction(transaction_id);
1297 }
1298
1299 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
1300 self.text.merge_transactions(transaction, destination);
1301 }
1302
1303 pub fn wait_for_edits(
1304 &mut self,
1305 edit_ids: impl IntoIterator<Item = clock::Lamport>,
1306 ) -> impl Future<Output = Result<()>> {
1307 self.text.wait_for_edits(edit_ids)
1308 }
1309
1310 pub fn wait_for_anchors(
1311 &mut self,
1312 anchors: impl IntoIterator<Item = Anchor>,
1313 ) -> impl 'static + Future<Output = Result<()>> {
1314 self.text.wait_for_anchors(anchors)
1315 }
1316
1317 pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
1318 self.text.wait_for_version(version)
1319 }
1320
1321 pub fn give_up_waiting(&mut self) {
1322 self.text.give_up_waiting();
1323 }
1324
1325 pub fn set_active_selections(
1326 &mut self,
1327 selections: Arc<[Selection<Anchor>]>,
1328 line_mode: bool,
1329 cursor_shape: CursorShape,
1330 cx: &mut ModelContext<Self>,
1331 ) {
1332 let lamport_timestamp = self.text.lamport_clock.tick();
1333 self.remote_selections.insert(
1334 self.text.replica_id(),
1335 SelectionSet {
1336 selections: selections.clone(),
1337 lamport_timestamp,
1338 line_mode,
1339 cursor_shape,
1340 },
1341 );
1342 self.send_operation(
1343 Operation::UpdateSelections {
1344 selections,
1345 line_mode,
1346 lamport_timestamp,
1347 cursor_shape,
1348 },
1349 cx,
1350 );
1351 }
1352
1353 pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
1354 if self
1355 .remote_selections
1356 .get(&self.text.replica_id())
1357 .map_or(true, |set| !set.selections.is_empty())
1358 {
1359 self.set_active_selections(Arc::from([]), false, Default::default(), cx);
1360 }
1361 }
1362
1363 pub fn set_text<T>(&mut self, text: T, cx: &mut ModelContext<Self>) -> Option<clock::Lamport>
1364 where
1365 T: Into<Arc<str>>,
1366 {
1367 self.autoindent_requests.clear();
1368 self.edit([(0..self.len(), text)], None, cx)
1369 }
1370
1371 pub fn edit<I, S, T>(
1372 &mut self,
1373 edits_iter: I,
1374 autoindent_mode: Option<AutoindentMode>,
1375 cx: &mut ModelContext<Self>,
1376 ) -> Option<clock::Lamport>
1377 where
1378 I: IntoIterator<Item = (Range<S>, T)>,
1379 S: ToOffset,
1380 T: Into<Arc<str>>,
1381 {
1382 // Skip invalid edits and coalesce contiguous ones.
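// (For example, a reversed range like `(5..3, "a")` is normalized to `(3..5, "a")`,
// and adjacent edits `(0..2, "x")` and `(2..4, "y")` coalesce into `(0..4, "xy")`.)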
1383 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
1384 for (range, new_text) in edits_iter {
1385 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
1386 if range.start > range.end {
1387 mem::swap(&mut range.start, &mut range.end);
1388 }
1389 let new_text = new_text.into();
1390 if !new_text.is_empty() || !range.is_empty() {
1391 if let Some((prev_range, prev_text)) = edits.last_mut() {
1392 if prev_range.end >= range.start {
1393 prev_range.end = cmp::max(prev_range.end, range.end);
1394 *prev_text = format!("{prev_text}{new_text}").into();
1395 } else {
1396 edits.push((range, new_text));
1397 }
1398 } else {
1399 edits.push((range, new_text));
1400 }
1401 }
1402 }
1403 if edits.is_empty() {
1404 return None;
1405 }
1406
1407 self.start_transaction();
1408 self.pending_autoindent.take();
1409 let autoindent_request = autoindent_mode
1410 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
1411
1412 let edit_operation = self.text.edit(edits.iter().cloned());
1413 let edit_id = edit_operation.timestamp();
1414
1415 if let Some((before_edit, mode)) = autoindent_request {
1416 let mut delta = 0isize;
1417 let entries = edits
1418 .into_iter()
1419 .enumerate()
1420 .zip(&edit_operation.as_edit().unwrap().new_text)
1421 .map(|((ix, (range, _)), new_text)| {
1422 let new_text_length = new_text.len();
1423 let old_start = range.start.to_point(&before_edit);
1424 let new_start = (delta + range.start as isize) as usize;
1425 delta += new_text_length as isize - (range.end as isize - range.start as isize);
1426
1427 let mut range_of_insertion_to_indent = 0..new_text_length;
1428 let mut first_line_is_new = false;
1429 let mut original_indent_column = None;
1430
1431 // When inserting an entire line at the beginning of an existing line,
1432 // treat the insertion as new.
1433 if new_text.contains('\n')
1434 && old_start.column <= before_edit.indent_size_for_line(old_start.row).len
1435 {
1436 first_line_is_new = true;
1437 }
1438
1439 // When inserting text starting with a newline, avoid auto-indenting the
1440 // previous line.
1441 if new_text.starts_with('\n') {
1442 range_of_insertion_to_indent.start += 1;
1443 first_line_is_new = true;
1444 }
1445
1446 // Avoid auto-indenting after the insertion.
1447 if let AutoindentMode::Block {
1448 original_indent_columns,
1449 } = &mode
1450 {
1451 original_indent_column =
1452 Some(original_indent_columns.get(ix).copied().unwrap_or_else(|| {
1453 indent_size_for_text(
1454 new_text[range_of_insertion_to_indent.clone()].chars(),
1455 )
1456 .len
1457 }));
1458 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
1459 range_of_insertion_to_indent.end -= 1;
1460 }
1461 }
1462
1463 AutoindentRequestEntry {
1464 first_line_is_new,
1465 original_indent_column,
1466 indent_size: before_edit.language_indent_size_at(range.start, cx),
1467 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
1468 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
1469 }
1470 })
1471 .collect();
1472
1473 self.autoindent_requests.push(Arc::new(AutoindentRequest {
1474 before_edit,
1475 entries,
1476 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
1477 }));
1478 }
1479
1480 self.end_transaction(cx);
1481 self.send_operation(Operation::Buffer(edit_operation), cx);
1482 Some(edit_id)
1483 }
1484
1485 fn did_edit(
1486 &mut self,
1487 old_version: &clock::Global,
1488 was_dirty: bool,
1489 cx: &mut ModelContext<Self>,
1490 ) {
1491 if self.edits_since::<usize>(old_version).next().is_none() {
1492 return;
1493 }
1494
1495 self.reparse(cx);
1496
1497 cx.emit(Event::Edited);
1498 if was_dirty != self.is_dirty() {
1499 cx.emit(Event::DirtyChanged);
1500 }
1501 cx.notify();
1502 }
1503
1504 pub fn apply_ops<I: IntoIterator<Item = Operation>>(
1505 &mut self,
1506 ops: I,
1507 cx: &mut ModelContext<Self>,
1508 ) -> Result<()> {
1509 self.pending_autoindent.take();
1510 let was_dirty = self.is_dirty();
1511 let old_version = self.version.clone();
1512 let mut deferred_ops = Vec::new();
1513 let buffer_ops = ops
1514 .into_iter()
1515 .filter_map(|op| match op {
1516 Operation::Buffer(op) => Some(op),
1517 _ => {
1518 if self.can_apply_op(&op) {
1519 self.apply_op(op, cx);
1520 } else {
1521 deferred_ops.push(op);
1522 }
1523 None
1524 }
1525 })
1526 .collect::<Vec<_>>();
1527 self.text.apply_ops(buffer_ops)?;
1528 self.deferred_ops.insert(deferred_ops);
1529 self.flush_deferred_ops(cx);
1530 self.did_edit(&old_version, was_dirty, cx);
1531 // Notify independently of whether the buffer was edited as the operations could include a
1532 // selection update.
1533 cx.notify();
1534 Ok(())
1535 }
1536
1537 fn flush_deferred_ops(&mut self, cx: &mut ModelContext<Self>) {
1538 let mut deferred_ops = Vec::new();
1539 for op in self.deferred_ops.drain().iter().cloned() {
1540 if self.can_apply_op(&op) {
1541 self.apply_op(op, cx);
1542 } else {
1543 deferred_ops.push(op);
1544 }
1545 }
1546 self.deferred_ops.insert(deferred_ops);
1547 }
1548
1549 fn can_apply_op(&self, operation: &Operation) -> bool {
1550 match operation {
1551 Operation::Buffer(_) => {
1552 unreachable!("buffer operations should never be applied at this layer")
1553 }
1554 Operation::UpdateDiagnostics {
1555 diagnostics: diagnostic_set,
1556 ..
1557 } => diagnostic_set.iter().all(|diagnostic| {
1558 self.text.can_resolve(&diagnostic.range.start)
1559 && self.text.can_resolve(&diagnostic.range.end)
1560 }),
1561 Operation::UpdateSelections { selections, .. } => selections
1562 .iter()
1563 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
1564 Operation::UpdateCompletionTriggers { .. } => true,
1565 }
1566 }
1567
1568 fn apply_op(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1569 match operation {
1570 Operation::Buffer(_) => {
1571 unreachable!("buffer operations should never be applied at this layer")
1572 }
1573 Operation::UpdateDiagnostics {
1574 server_id,
1575 diagnostics: diagnostic_set,
1576 lamport_timestamp,
1577 } => {
1578 let snapshot = self.snapshot();
1579 self.apply_diagnostic_update(
1580 server_id,
1581 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
1582 lamport_timestamp,
1583 cx,
1584 );
1585 }
1586 Operation::UpdateSelections {
1587 selections,
1588 lamport_timestamp,
1589 line_mode,
1590 cursor_shape,
1591 } => {
1592 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
1593 if set.lamport_timestamp > lamport_timestamp {
1594 return;
1595 }
1596 }
1597
1598 self.remote_selections.insert(
1599 lamport_timestamp.replica_id,
1600 SelectionSet {
1601 selections,
1602 lamport_timestamp,
1603 line_mode,
1604 cursor_shape,
1605 },
1606 );
1607 self.text.lamport_clock.observe(lamport_timestamp);
1608 self.selections_update_count += 1;
1609 }
1610 Operation::UpdateCompletionTriggers {
1611 triggers,
1612 lamport_timestamp,
1613 } => {
1614 self.completion_triggers = triggers;
1615 self.text.lamport_clock.observe(lamport_timestamp);
1616 }
1617 }
1618 }
1619
1620 fn apply_diagnostic_update(
1621 &mut self,
1622 server_id: LanguageServerId,
1623 diagnostics: DiagnosticSet,
1624 lamport_timestamp: clock::Lamport,
1625 cx: &mut ModelContext<Self>,
1626 ) {
1627 if lamport_timestamp > self.diagnostics_timestamp {
1628 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
1629 if diagnostics.len() == 0 {
1630 if let Ok(ix) = ix {
1631 self.diagnostics.remove(ix);
1632 }
1633 } else {
1634 match ix {
1635 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
1636 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
1637 };
1638 }
1639 self.diagnostics_timestamp = lamport_timestamp;
1640 self.diagnostics_update_count += 1;
1641 self.text.lamport_clock.observe(lamport_timestamp);
1642 cx.notify();
1643 cx.emit(Event::DiagnosticsUpdated);
1644 }
1645 }
1646
1647 fn send_operation(&mut self, operation: Operation, cx: &mut ModelContext<Self>) {
1648 cx.emit(Event::Operation(operation));
1649 }
1650
1651 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut ModelContext<Self>) {
1652 self.remote_selections.remove(&replica_id);
1653 cx.notify();
1654 }
1655
1656 pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1657 let was_dirty = self.is_dirty();
1658 let old_version = self.version.clone();
1659
1660 if let Some((transaction_id, operation)) = self.text.undo() {
1661 self.send_operation(Operation::Buffer(operation), cx);
1662 self.did_edit(&old_version, was_dirty, cx);
1663 Some(transaction_id)
1664 } else {
1665 None
1666 }
1667 }
1668
1669 pub fn undo_transaction(
1670 &mut self,
1671 transaction_id: TransactionId,
1672 cx: &mut ModelContext<Self>,
1673 ) -> bool {
1674 let was_dirty = self.is_dirty();
1675 let old_version = self.version.clone();
1676 if let Some(operation) = self.text.undo_transaction(transaction_id) {
1677 self.send_operation(Operation::Buffer(operation), cx);
1678 self.did_edit(&old_version, was_dirty, cx);
1679 true
1680 } else {
1681 false
1682 }
1683 }
1684
1685 pub fn undo_to_transaction(
1686 &mut self,
1687 transaction_id: TransactionId,
1688 cx: &mut ModelContext<Self>,
1689 ) -> bool {
1690 let was_dirty = self.is_dirty();
1691 let old_version = self.version.clone();
1692
1693 let operations = self.text.undo_to_transaction(transaction_id);
1694 let undone = !operations.is_empty();
1695 for operation in operations {
1696 self.send_operation(Operation::Buffer(operation), cx);
1697 }
1698 if undone {
1699 self.did_edit(&old_version, was_dirty, cx)
1700 }
1701 undone
1702 }
1703
1704 pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
1705 let was_dirty = self.is_dirty();
1706 let old_version = self.version.clone();
1707
1708 if let Some((transaction_id, operation)) = self.text.redo() {
1709 self.send_operation(Operation::Buffer(operation), cx);
1710 self.did_edit(&old_version, was_dirty, cx);
1711 Some(transaction_id)
1712 } else {
1713 None
1714 }
1715 }
1716
1717 pub fn redo_to_transaction(
1718 &mut self,
1719 transaction_id: TransactionId,
1720 cx: &mut ModelContext<Self>,
1721 ) -> bool {
1722 let was_dirty = self.is_dirty();
1723 let old_version = self.version.clone();
1724
1725 let operations = self.text.redo_to_transaction(transaction_id);
1726 let redone = !operations.is_empty();
1727 for operation in operations {
1728 self.send_operation(Operation::Buffer(operation), cx);
1729 }
1730 if redone {
1731 self.did_edit(&old_version, was_dirty, cx)
1732 }
1733 redone
1734 }
1735
1736 pub fn set_completion_triggers(&mut self, triggers: Vec<String>, cx: &mut ModelContext<Self>) {
1737 self.completion_triggers = triggers.clone();
1738 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
1739 self.send_operation(
1740 Operation::UpdateCompletionTriggers {
1741 triggers,
1742 lamport_timestamp: self.completion_triggers_timestamp,
1743 },
1744 cx,
1745 );
1746 cx.notify();
1747 }
1748
1749 pub fn completion_triggers(&self) -> &[String] {
1750 &self.completion_triggers
1751 }
1752}
1753
1754#[cfg(any(test, feature = "test-support"))]
1755impl Buffer {
1756 pub fn edit_via_marked_text(
1757 &mut self,
1758 marked_string: &str,
1759 autoindent_mode: Option<AutoindentMode>,
1760 cx: &mut ModelContext<Self>,
1761 ) {
1762 let edits = self.edits_for_marked_text(marked_string);
1763 self.edit(edits, autoindent_mode, cx);
1764 }
1765
1766 pub fn set_group_interval(&mut self, group_interval: Duration) {
1767 self.text.set_group_interval(group_interval);
1768 }
1769
1770 pub fn randomly_edit<T>(
1771 &mut self,
1772 rng: &mut T,
1773 old_range_count: usize,
1774 cx: &mut ModelContext<Self>,
1775 ) where
1776 T: rand::Rng,
1777 {
1778 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
1779 let mut last_end = None;
1780 for _ in 0..old_range_count {
1781 if last_end.map_or(false, |last_end| last_end >= self.len()) {
1782 break;
1783 }
1784
1785 let new_start = last_end.map_or(0, |last_end| last_end + 1);
1786 let mut range = self.random_byte_range(new_start, rng);
1787 if rng.gen_bool(0.2) {
1788 mem::swap(&mut range.start, &mut range.end);
1789 }
1790 last_end = Some(range.end);
1791
1792 let new_text_len = rng.gen_range(0..10);
1793 let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
1794
1795 edits.push((range, new_text));
1796 }
1797 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
1798 self.edit(edits, None, cx);
1799 }
1800
1801 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut ModelContext<Self>) {
1802 let was_dirty = self.is_dirty();
1803 let old_version = self.version.clone();
1804
1805 let ops = self.text.randomly_undo_redo(rng);
1806 if !ops.is_empty() {
1807 for op in ops {
1808 self.send_operation(Operation::Buffer(op), cx);
1809 self.did_edit(&old_version, was_dirty, cx);
1810 }
1811 }
1812 }
1813}
1814
1815impl EventEmitter for Buffer {
1816 type Event = Event;
1817}
1818
1819impl Deref for Buffer {
1820 type Target = TextBuffer;
1821
1822 fn deref(&self) -> &Self::Target {
1823 &self.text
1824 }
1825}
1826
1827impl BufferSnapshot {
1828 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
1829 indent_size_for_line(self, row)
1830 }
1831
1832 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
1833 let settings = language_settings(self.language_at(position), self.file(), cx);
1834 if settings.hard_tabs {
1835 IndentSize::tab()
1836 } else {
1837 IndentSize::spaces(settings.tab_size.get())
1838 }
1839 }
1840
1841 pub fn suggested_indents(
1842 &self,
1843 rows: impl Iterator<Item = u32>,
1844 single_indent_size: IndentSize,
1845 ) -> BTreeMap<u32, IndentSize> {
1846 let mut result = BTreeMap::new();
1847
1848 for row_range in contiguous_ranges(rows, 10) {
1849 let suggestions = match self.suggest_autoindents(row_range.clone()) {
1850 Some(suggestions) => suggestions,
1851 _ => break,
1852 };
1853
1854 for (row, suggestion) in row_range.zip(suggestions) {
1855 let indent_size = if let Some(suggestion) = suggestion {
1856 result
1857 .get(&suggestion.basis_row)
1858 .copied()
1859 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
1860 .with_delta(suggestion.delta, single_indent_size)
1861 } else {
1862 self.indent_size_for_line(row)
1863 };
1864
1865 result.insert(row, indent_size);
1866 }
1867 }
1868
1869 result
1870 }
1871
1872 fn suggest_autoindents(
1873 &self,
1874 row_range: Range<u32>,
1875 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
1876 let config = &self.language.as_ref()?.config;
1877 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
1878
1879 // Find the suggested indentation ranges based on the syntax tree.
1880 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
1881 let end = Point::new(row_range.end, 0);
1882 let range = (start..end).to_offset(&self.text);
1883 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1884 Some(&grammar.indents_config.as_ref()?.query)
1885 });
1886 let indent_configs = matches
1887 .grammars()
1888 .iter()
1889 .map(|grammar| grammar.indents_config.as_ref().unwrap())
1890 .collect::<Vec<_>>();
1891
1892 let mut indent_ranges = Vec::<Range<Point>>::new();
1893 let mut outdent_positions = Vec::<Point>::new();
1894 while let Some(mat) = matches.peek() {
1895 let mut start: Option<Point> = None;
1896 let mut end: Option<Point> = None;
1897
1898 let config = &indent_configs[mat.grammar_index];
1899 for capture in mat.captures {
1900 if capture.index == config.indent_capture_ix {
1901 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
1902 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
1903 } else if Some(capture.index) == config.start_capture_ix {
1904 start = Some(Point::from_ts_point(capture.node.end_position()));
1905 } else if Some(capture.index) == config.end_capture_ix {
1906 end = Some(Point::from_ts_point(capture.node.start_position()));
1907 } else if Some(capture.index) == config.outdent_capture_ix {
1908 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
1909 }
1910 }
1911
1912 matches.advance();
1913 if let Some((start, end)) = start.zip(end) {
1914 if start.row == end.row {
1915 continue;
1916 }
1917
1918 let range = start..end;
1919 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
1920 Err(ix) => indent_ranges.insert(ix, range),
1921 Ok(ix) => {
1922 let prev_range = &mut indent_ranges[ix];
1923 prev_range.end = prev_range.end.max(range.end);
1924 }
1925 }
1926 }
1927 }
1928
1929 let mut error_ranges = Vec::<Range<Point>>::new();
1930 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
1931 Some(&grammar.error_query)
1932 });
1933 while let Some(mat) = matches.peek() {
1934 let node = mat.captures[0].node;
1935 let start = Point::from_ts_point(node.start_position());
1936 let end = Point::from_ts_point(node.end_position());
1937 let range = start..end;
1938 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
1939 Ok(ix) | Err(ix) => ix,
1940 };
1941 let mut end_ix = ix;
1942 while let Some(existing_range) = error_ranges.get(end_ix) {
1943 if existing_range.end < end {
1944 end_ix += 1;
1945 } else {
1946 break;
1947 }
1948 }
1949 error_ranges.splice(ix..end_ix, [range]);
1950 matches.advance();
1951 }
1952
1953 outdent_positions.sort();
1954 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and truncate that range's end to the outdent position.
1957 if let Some(range_to_truncate) = indent_ranges
1958 .iter_mut()
1959 .filter(|indent_range| indent_range.contains(&outdent_position))
1960 .last()
1961 {
1962 range_to_truncate.end = outdent_position;
1963 }
1964 }
1965
        // Find the suggested indentation increases and decreases based on regexes.
1967 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
1968 self.for_each_line(
1969 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
1970 ..Point::new(row_range.end, 0),
1971 |row, line| {
1972 if config
1973 .decrease_indent_pattern
1974 .as_ref()
1975 .map_or(false, |regex| regex.is_match(line))
1976 {
1977 indent_change_rows.push((row, Ordering::Less));
1978 }
1979 if config
1980 .increase_indent_pattern
1981 .as_ref()
1982 .map_or(false, |regex| regex.is_match(line))
1983 {
1984 indent_change_rows.push((row + 1, Ordering::Greater));
1985 }
1986 },
1987 );
1988
1989 let mut indent_changes = indent_change_rows.into_iter().peekable();
1990 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
1991 prev_non_blank_row.unwrap_or(0)
1992 } else {
1993 row_range.start.saturating_sub(1)
1994 };
1995 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
1996 Some(row_range.map(move |row| {
1997 let row_start = Point::new(row, self.indent_size_for_line(row).len);
1998
1999 let mut indent_from_prev_row = false;
2000 let mut outdent_from_prev_row = false;
2001 let mut outdent_to_row = u32::MAX;
2002
2003 while let Some((indent_row, delta)) = indent_changes.peek() {
2004 match indent_row.cmp(&row) {
2005 Ordering::Equal => match delta {
2006 Ordering::Less => outdent_from_prev_row = true,
2007 Ordering::Greater => indent_from_prev_row = true,
2008 _ => {}
2009 },
2010
2011 Ordering::Greater => break,
2012 Ordering::Less => {}
2013 }
2014
2015 indent_changes.next();
2016 }
2017
2018 for range in &indent_ranges {
2019 if range.start.row >= row {
2020 break;
2021 }
2022 if range.start.row == prev_row && range.end > row_start {
2023 indent_from_prev_row = true;
2024 }
2025 if range.end > prev_row_start && range.end <= row_start {
2026 outdent_to_row = outdent_to_row.min(range.start.row);
2027 }
2028 }
2029
2030 let within_error = error_ranges
2031 .iter()
2032 .any(|e| e.start.row < row && e.end > row_start);
2033
2034 let suggestion = if outdent_to_row == prev_row
2035 || (outdent_from_prev_row && indent_from_prev_row)
2036 {
2037 Some(IndentSuggestion {
2038 basis_row: prev_row,
2039 delta: Ordering::Equal,
2040 within_error,
2041 })
2042 } else if indent_from_prev_row {
2043 Some(IndentSuggestion {
2044 basis_row: prev_row,
2045 delta: Ordering::Greater,
2046 within_error,
2047 })
2048 } else if outdent_to_row < prev_row {
2049 Some(IndentSuggestion {
2050 basis_row: outdent_to_row,
2051 delta: Ordering::Equal,
2052 within_error,
2053 })
2054 } else if outdent_from_prev_row {
2055 Some(IndentSuggestion {
2056 basis_row: prev_row,
2057 delta: Ordering::Less,
2058 within_error,
2059 })
2060 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
2061 {
2062 Some(IndentSuggestion {
2063 basis_row: prev_row,
2064 delta: Ordering::Equal,
2065 within_error,
2066 })
2067 } else {
2068 None
2069 };
2070
2071 prev_row = row;
2072 prev_row_start = row_start;
2073 suggestion
2074 }))
2075 }
2076
2077 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
2078 while row > 0 {
2079 row -= 1;
2080 if !self.is_line_blank(row) {
2081 return Some(row);
2082 }
2083 }
2084 None
2085 }
2086
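    /// Returns an iterator over chunks of text in the given range. When
    /// `language_aware` is true, each chunk also carries syntax-highlighting
    /// and diagnostic information for the text it covers.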
2087 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
2088 let range = range.start.to_offset(self)..range.end.to_offset(self);
2089
2090 let mut syntax = None;
2091 let mut diagnostic_endpoints = Vec::new();
2092 if language_aware {
2093 let captures = self.syntax.captures(range.clone(), &self.text, |grammar| {
2094 grammar.highlights_query.as_ref()
2095 });
2096 let highlight_maps = captures
2097 .grammars()
2098 .into_iter()
2099 .map(|grammar| grammar.highlight_map())
2100 .collect();
2101 syntax = Some((captures, highlight_maps));
2102 for entry in self.diagnostics_in_range::<_, usize>(range.clone(), false) {
2103 diagnostic_endpoints.push(DiagnosticEndpoint {
2104 offset: entry.range.start,
2105 is_start: true,
2106 severity: entry.diagnostic.severity,
2107 is_unnecessary: entry.diagnostic.is_unnecessary,
2108 });
2109 diagnostic_endpoints.push(DiagnosticEndpoint {
2110 offset: entry.range.end,
2111 is_start: false,
2112 severity: entry.diagnostic.severity,
2113 is_unnecessary: entry.diagnostic.is_unnecessary,
2114 });
2115 }
2116 diagnostic_endpoints
2117 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
2118 }
2119
2120 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostic_endpoints)
2121 }
2122
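    /// Calls `callback` once per line overlapping the given range, passing the
    /// row number and the portion of that line's text that falls within the
    /// range.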
2123 pub fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
2124 let mut line = String::new();
2125 let mut row = range.start.row;
2126 for chunk in self
2127 .as_rope()
2128 .chunks_in_range(range.to_offset(self))
2129 .chain(["\n"])
2130 {
2131 for (newline_ix, text) in chunk.split('\n').enumerate() {
2132 if newline_ix > 0 {
2133 callback(row, &line);
2134 row += 1;
2135 line.clear();
2136 }
2137 line.push_str(text);
2138 }
2139 }
2140 }
2141
2142 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayerInfo> + '_ {
2143 self.syntax.layers_for_range(0..self.len(), &self.text)
2144 }
2145
2146 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayerInfo> {
2147 let offset = position.to_offset(self);
2148 self.syntax
2149 .layers_for_range(offset..offset, &self.text)
2150 .filter(|l| l.node().end_byte() > offset)
2151 .last()
2152 }
2153
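    /// Returns the language of the deepest syntax layer containing the given
    /// position, falling back to the buffer's base language when no layer
    /// matches.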
2154 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
2155 self.syntax_layer_at(position)
2156 .map(|info| info.language)
2157 .or(self.language.as_ref())
2158 }
2159
2160 pub fn settings_at<'a, D: ToOffset>(
2161 &self,
2162 position: D,
2163 cx: &'a AppContext,
2164 ) -> &'a LanguageSettings {
2165 language_settings(self.language_at(position), self.file.as_ref(), cx)
2166 }
2167
2168 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
2169 let offset = position.to_offset(self);
2170 let mut scope = None;
2171 let mut smallest_range: Option<Range<usize>> = None;
2172
2173 // Use the layer that has the smallest node intersecting the given point.
2174 for layer in self.syntax.layers_for_range(offset..offset, &self.text) {
2175 let mut cursor = layer.node().walk();
2176
2177 let mut range = None;
2178 loop {
2179 let child_range = cursor.node().byte_range();
2180 if !child_range.to_inclusive().contains(&offset) {
2181 break;
2182 }
2183
2184 range = Some(child_range);
2185 if cursor.goto_first_child_for_byte(offset).is_none() {
2186 break;
2187 }
2188 }
2189
2190 if let Some(range) = range {
2191 if smallest_range
2192 .as_ref()
2193 .map_or(true, |smallest_range| range.len() < smallest_range.len())
2194 {
2195 smallest_range = Some(range);
2196 scope = Some(LanguageScope {
2197 language: layer.language.clone(),
2198 override_id: layer.override_id(offset, &self.text),
2199 });
2200 }
2201 }
2202 }
2203
2204 scope.or_else(|| {
2205 self.language.clone().map(|language| LanguageScope {
2206 language,
2207 override_id: None,
2208 })
2209 })
2210 }
2211
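    /// Expands outward from `start` to cover the run of characters that share
    /// the same `CharKind` as the characters adjacent to `start`, without
    /// crossing newlines. Returns that range along with the character kind, if
    /// any.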
2212 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
2213 let mut start = start.to_offset(self);
2214 let mut end = start;
2215 let mut next_chars = self.chars_at(start).peekable();
2216 let mut prev_chars = self.reversed_chars_at(start).peekable();
2217
2218 let scope = self.language_scope_at(start);
2219 let kind = |c| char_kind(&scope, c);
2220 let word_kind = cmp::max(
2221 prev_chars.peek().copied().map(kind),
2222 next_chars.peek().copied().map(kind),
2223 );
2224
2225 for ch in prev_chars {
2226 if Some(kind(ch)) == word_kind && ch != '\n' {
2227 start -= ch.len_utf8();
2228 } else {
2229 break;
2230 }
2231 }
2232
2233 for ch in next_chars {
2234 if Some(kind(ch)) == word_kind && ch != '\n' {
2235 end += ch.len_utf8();
2236 } else {
2237 break;
2238 }
2239 }
2240
2241 (start..end, word_kind)
2242 }
2243
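    /// Returns the byte range of the smallest syntax node that strictly
    /// contains the given range. Every syntax layer is considered, and the
    /// smallest qualifying node across layers wins. For an empty range sitting
    /// between two nodes, a named node is preferred over an anonymous token,
    /// with ties going to the node on the right.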
2244 pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
2245 let range = range.start.to_offset(self)..range.end.to_offset(self);
2246 let mut result: Option<Range<usize>> = None;
2247 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
2248 let mut cursor = layer.node().walk();
2249
            // Descend to the first leaf that touches the start of the range and,
            // if the range is non-empty, extends beyond the start.
2252 while cursor.goto_first_child_for_byte(range.start).is_some() {
2253 if !range.is_empty() && cursor.node().end_byte() == range.start {
2254 cursor.goto_next_sibling();
2255 }
2256 }
2257
2258 // Ascend to the smallest ancestor that strictly contains the range.
2259 loop {
2260 let node_range = cursor.node().byte_range();
2261 if node_range.start <= range.start
2262 && node_range.end >= range.end
2263 && node_range.len() > range.len()
2264 {
2265 break;
2266 }
2267 if !cursor.goto_parent() {
2268 continue 'outer;
2269 }
2270 }
2271
2272 let left_node = cursor.node();
2273 let mut layer_result = left_node.byte_range();
2274
2275 // For an empty range, try to find another node immediately to the right of the range.
2276 if left_node.end_byte() == range.start {
2277 let mut right_node = None;
2278 while !cursor.goto_next_sibling() {
2279 if !cursor.goto_parent() {
2280 break;
2281 }
2282 }
2283
2284 while cursor.node().start_byte() == range.start {
2285 right_node = Some(cursor.node());
2286 if !cursor.goto_first_child() {
2287 break;
2288 }
2289 }
2290
2291 // If there is a candidate node on both sides of the (empty) range, then
2292 // decide between the two by favoring a named node over an anonymous token.
2293 // If both nodes are the same in that regard, favor the right one.
2294 if let Some(right_node) = right_node {
2295 if right_node.is_named() || !left_node.is_named() {
2296 layer_result = right_node.byte_range();
2297 }
2298 }
2299 }
2300
2301 if let Some(previous_result) = &result {
2302 if previous_result.len() < layer_result.len() {
2303 continue;
2304 }
2305 }
2306 result = Some(layer_result);
2307 }
2308
2309 result
2310 }
2311
2312 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
2313 self.outline_items_containing(0..self.len(), true, theme)
2314 .map(Outline::new)
2315 }
2316
2317 pub fn symbols_containing<T: ToOffset>(
2318 &self,
2319 position: T,
2320 theme: Option<&SyntaxTheme>,
2321 ) -> Option<Vec<OutlineItem<Anchor>>> {
2322 let position = position.to_offset(self);
2323 let mut items = self.outline_items_containing(
2324 position.saturating_sub(1)..self.len().min(position + 1),
2325 false,
2326 theme,
2327 )?;
2328 let mut prev_depth = None;
2329 items.retain(|item| {
2330 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
2331 prev_depth = Some(item.depth);
2332 result
2333 });
2334 Some(items)
2335 }
2336
2337 fn outline_items_containing(
2338 &self,
2339 range: Range<usize>,
2340 include_extra_context: bool,
2341 theme: Option<&SyntaxTheme>,
2342 ) -> Option<Vec<OutlineItem<Anchor>>> {
2343 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2344 grammar.outline_config.as_ref().map(|c| &c.query)
2345 });
2346 let configs = matches
2347 .grammars()
2348 .iter()
2349 .map(|g| g.outline_config.as_ref().unwrap())
2350 .collect::<Vec<_>>();
2351
2352 let mut stack = Vec::<Range<usize>>::new();
2353 let mut items = Vec::new();
2354 while let Some(mat) = matches.peek() {
2355 let config = &configs[mat.grammar_index];
2356 let item_node = mat.captures.iter().find_map(|cap| {
2357 if cap.index == config.item_capture_ix {
2358 Some(cap.node)
2359 } else {
2360 None
2361 }
2362 })?;
2363
2364 let item_range = item_node.byte_range();
2365 if item_range.end < range.start || item_range.start > range.end {
2366 matches.advance();
2367 continue;
2368 }
2369
2370 let mut buffer_ranges = Vec::new();
2371 for capture in mat.captures {
2372 let node_is_name;
2373 if capture.index == config.name_capture_ix {
2374 node_is_name = true;
2375 } else if Some(capture.index) == config.context_capture_ix
2376 || (Some(capture.index) == config.extra_context_capture_ix
2377 && include_extra_context)
2378 {
2379 node_is_name = false;
2380 } else {
2381 continue;
2382 }
2383
2384 let mut range = capture.node.start_byte()..capture.node.end_byte();
2385 let start = capture.node.start_position();
2386 if capture.node.end_position().row > start.row {
2387 range.end =
2388 range.start + self.line_len(start.row as u32) as usize - start.column;
2389 }
2390
2391 buffer_ranges.push((range, node_is_name));
2392 }
2393
2394 if buffer_ranges.is_empty() {
2395 continue;
2396 }
2397
2398 let mut text = String::new();
2399 let mut highlight_ranges = Vec::new();
2400 let mut name_ranges = Vec::new();
2401 let mut chunks = self.chunks(
2402 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
2403 true,
2404 );
2405 let mut last_buffer_range_end = 0;
2406 for (buffer_range, is_name) in buffer_ranges {
2407 if !text.is_empty() && buffer_range.start > last_buffer_range_end {
2408 text.push(' ');
2409 }
2410 last_buffer_range_end = buffer_range.end;
2411 if is_name {
2412 let mut start = text.len();
2413 let end = start + buffer_range.len();
2414
                    // When multiple names are captured, the matchable text
                    // includes the whitespace in between the names.
2417 if !name_ranges.is_empty() {
2418 start -= 1;
2419 }
2420
2421 name_ranges.push(start..end);
2422 }
2423
2424 let mut offset = buffer_range.start;
2425 chunks.seek(offset);
2426 for mut chunk in chunks.by_ref() {
2427 if chunk.text.len() > buffer_range.end - offset {
2428 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
2429 offset = buffer_range.end;
2430 } else {
2431 offset += chunk.text.len();
2432 }
2433 let style = chunk
2434 .syntax_highlight_id
2435 .zip(theme)
2436 .and_then(|(highlight, theme)| highlight.style(theme));
2437 if let Some(style) = style {
2438 let start = text.len();
2439 let end = start + chunk.text.len();
2440 highlight_ranges.push((start..end, style));
2441 }
2442 text.push_str(chunk.text);
2443 if offset >= buffer_range.end {
2444 break;
2445 }
2446 }
2447 }
2448
2449 matches.advance();
2450 while stack.last().map_or(false, |prev_range| {
2451 prev_range.start > item_range.start || prev_range.end < item_range.end
2452 }) {
2453 stack.pop();
2454 }
2455 stack.push(item_range.clone());
2456
2457 items.push(OutlineItem {
2458 depth: stack.len() - 1,
2459 range: self.anchor_after(item_range.start)..self.anchor_before(item_range.end),
2460 text,
2461 highlight_ranges,
2462 name_ranges,
2463 })
2464 }
2465 Some(items)
2466 }
2467
2468 pub fn matches(
2469 &self,
2470 range: Range<usize>,
2471 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
2472 ) -> SyntaxMapMatches {
2473 self.syntax.matches(range, self, query)
2474 }
2475
    /// Returns pairs of open and close bracket ranges that overlap or are adjacent to `range`.
2477 pub fn bracket_ranges<'a, T: ToOffset>(
2478 &'a self,
2479 range: Range<T>,
2480 ) -> impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a {
        // Expand the search range by one byte in each direction so that bracket
        // pairs adjacent to `range` are also considered.
2482 let range = range.start.to_offset(self).saturating_sub(1)
2483 ..self.len().min(range.end.to_offset(self) + 1);
2484
2485 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2486 grammar.brackets_config.as_ref().map(|c| &c.query)
2487 });
2488 let configs = matches
2489 .grammars()
2490 .iter()
2491 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
2492 .collect::<Vec<_>>();
2493
2494 iter::from_fn(move || {
2495 while let Some(mat) = matches.peek() {
2496 let mut open = None;
2497 let mut close = None;
2498 let config = &configs[mat.grammar_index];
2499 for capture in mat.captures {
2500 if capture.index == config.open_capture_ix {
2501 open = Some(capture.node.byte_range());
2502 } else if capture.index == config.close_capture_ix {
2503 close = Some(capture.node.byte_range());
2504 }
2505 }
2506
2507 matches.advance();
2508
2509 let Some((open, close)) = open.zip(close) else {
2510 continue;
2511 };
2512
2513 let bracket_range = open.start..=close.end;
2514 if !bracket_range.overlaps(&range) {
2515 continue;
2516 }
2517
2518 return Some((open, close));
2519 }
2520 None
2521 })
2522 }
2523
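    /// Yields, for every other replica that has selections, the replica id,
    /// its line-mode flag, its cursor shape, and the subset of its selections
    /// that intersect `range`.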
2524 #[allow(clippy::type_complexity)]
2525 pub fn remote_selections_in_range(
2526 &self,
2527 range: Range<Anchor>,
2528 ) -> impl Iterator<
2529 Item = (
2530 ReplicaId,
2531 bool,
2532 CursorShape,
2533 impl Iterator<Item = &Selection<Anchor>> + '_,
2534 ),
2535 > + '_ {
2536 self.remote_selections
2537 .iter()
2538 .filter(|(replica_id, set)| {
2539 **replica_id != self.text.replica_id() && !set.selections.is_empty()
2540 })
2541 .map(move |(replica_id, set)| {
2542 let start_ix = match set.selections.binary_search_by(|probe| {
2543 probe.end.cmp(&range.start, self).then(Ordering::Greater)
2544 }) {
2545 Ok(ix) | Err(ix) => ix,
2546 };
2547 let end_ix = match set.selections.binary_search_by(|probe| {
2548 probe.start.cmp(&range.end, self).then(Ordering::Less)
2549 }) {
2550 Ok(ix) | Err(ix) => ix,
2551 };
2552
2553 (
2554 *replica_id,
2555 set.line_mode,
2556 set.cursor_shape,
2557 set.selections[start_ix..end_ix].iter(),
2558 )
2559 })
2560 }
2561
2562 pub fn git_diff_hunks_in_row_range<'a>(
2563 &'a self,
2564 range: Range<u32>,
2565 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2566 self.git_diff.hunks_in_row_range(range, self)
2567 }
2568
2569 pub fn git_diff_hunks_intersecting_range<'a>(
2570 &'a self,
2571 range: Range<Anchor>,
2572 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2573 self.git_diff.hunks_intersecting_range(range, self)
2574 }
2575
2576 pub fn git_diff_hunks_intersecting_range_rev<'a>(
2577 &'a self,
2578 range: Range<Anchor>,
2579 ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
2580 self.git_diff.hunks_intersecting_range_rev(range, self)
2581 }
2582
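    /// Returns the diagnostics from all language servers whose ranges
    /// intersect `search_range`, merged into a single sequence. `reversed`
    /// reverses the traversal order within each server's diagnostic set.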
2583 pub fn diagnostics_in_range<'a, T, O>(
2584 &'a self,
2585 search_range: Range<T>,
2586 reversed: bool,
2587 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2588 where
2589 T: 'a + Clone + ToOffset,
2590 O: 'a + FromAnchor + Ord,
2591 {
2592 let mut iterators: Vec<_> = self
2593 .diagnostics
2594 .iter()
2595 .map(|(_, collection)| {
2596 collection
2597 .range::<T, O>(search_range.clone(), self, true, reversed)
2598 .peekable()
2599 })
2600 .collect();
2601
2602 std::iter::from_fn(move || {
2603 let (next_ix, _) = iterators
2604 .iter_mut()
2605 .enumerate()
2606 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
2607 .min_by(|(_, a), (_, b)| a.range.start.cmp(&b.range.start))?;
2608 iterators[next_ix].next()
2609 })
2610 }
2611
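    /// Returns diagnostic groups, either for the given language server or for
    /// all servers when `language_server_id` is `None`, sorted by the position
    /// of each group's primary diagnostic and then by server id.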
2612 pub fn diagnostic_groups(
2613 &self,
2614 language_server_id: Option<LanguageServerId>,
2615 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
2616 let mut groups = Vec::new();
2617
2618 if let Some(language_server_id) = language_server_id {
2619 if let Ok(ix) = self
2620 .diagnostics
2621 .binary_search_by_key(&language_server_id, |e| e.0)
2622 {
2623 self.diagnostics[ix]
2624 .1
2625 .groups(language_server_id, &mut groups, self);
2626 }
2627 } else {
2628 for (language_server_id, diagnostics) in self.diagnostics.iter() {
2629 diagnostics.groups(*language_server_id, &mut groups, self);
2630 }
2631 }
2632
2633 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
2634 let a_start = &group_a.entries[group_a.primary_ix].range.start;
2635 let b_start = &group_b.entries[group_b.primary_ix].range.start;
2636 a_start.cmp(b_start, self).then_with(|| id_a.cmp(&id_b))
2637 });
2638
2639 groups
2640 }
2641
2642 pub fn diagnostic_group<'a, O>(
2643 &'a self,
2644 group_id: usize,
2645 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
2646 where
2647 O: 'a + FromAnchor,
2648 {
2649 self.diagnostics
2650 .iter()
2651 .flat_map(move |(_, set)| set.group(group_id, self))
2652 }
2653
2654 pub fn diagnostics_update_count(&self) -> usize {
2655 self.diagnostics_update_count
2656 }
2657
2658 pub fn parse_count(&self) -> usize {
2659 self.parse_count
2660 }
2661
2662 pub fn selections_update_count(&self) -> usize {
2663 self.selections_update_count
2664 }
2665
2666 pub fn file(&self) -> Option<&Arc<dyn File>> {
2667 self.file.as_ref()
2668 }
2669
2670 pub fn resolve_file_path(&self, cx: &AppContext, include_root: bool) -> Option<PathBuf> {
2671 if let Some(file) = self.file() {
2672 if file.path().file_name().is_none() || include_root {
2673 Some(file.full_path(cx))
2674 } else {
2675 Some(file.path().to_path_buf())
2676 }
2677 } else {
2678 None
2679 }
2680 }
2681
2682 pub fn file_update_count(&self) -> usize {
2683 self.file_update_count
2684 }
2685
2686 pub fn git_diff_update_count(&self) -> usize {
2687 self.git_diff_update_count
2688 }
2689}
2690
2691fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
2692 indent_size_for_text(text.chars_at(Point::new(row, 0)))
2693}
2694
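/// Measures the indentation at the start of a character stream: the length is
/// the number of leading spaces and tabs, and the kind is taken from the first
/// indent character. For example, `"  \tfoo"` yields a length of 3 with
/// `IndentKind::Space`, while `"\tbar"` yields a length of 1 with
/// `IndentKind::Tab`.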
2695pub fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
2696 let mut result = IndentSize::spaces(0);
2697 for c in text {
2698 let kind = match c {
2699 ' ' => IndentKind::Space,
2700 '\t' => IndentKind::Tab,
2701 _ => break,
2702 };
2703 if result.len == 0 {
2704 result.kind = kind;
2705 }
2706 result.len += 1;
2707 }
2708 result
2709}
2710
2711impl Clone for BufferSnapshot {
2712 fn clone(&self) -> Self {
2713 Self {
2714 text: self.text.clone(),
2715 git_diff: self.git_diff.clone(),
2716 syntax: self.syntax.clone(),
2717 file: self.file.clone(),
2718 remote_selections: self.remote_selections.clone(),
2719 diagnostics: self.diagnostics.clone(),
2720 selections_update_count: self.selections_update_count,
2721 diagnostics_update_count: self.diagnostics_update_count,
2722 file_update_count: self.file_update_count,
2723 git_diff_update_count: self.git_diff_update_count,
2724 language: self.language.clone(),
2725 parse_count: self.parse_count,
2726 }
2727 }
2728}
2729
2730impl Deref for BufferSnapshot {
2731 type Target = text::BufferSnapshot;
2732
2733 fn deref(&self) -> &Self::Target {
2734 &self.text
2735 }
2736}
2737
2738unsafe impl<'a> Send for BufferChunks<'a> {}
2739
2740impl<'a> BufferChunks<'a> {
2741 pub(crate) fn new(
2742 text: &'a Rope,
2743 range: Range<usize>,
2744 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
2745 diagnostic_endpoints: Vec<DiagnosticEndpoint>,
2746 ) -> Self {
2747 let mut highlights = None;
2748 if let Some((captures, highlight_maps)) = syntax {
2749 highlights = Some(BufferChunkHighlights {
2750 captures,
2751 next_capture: None,
2752 stack: Default::default(),
2753 highlight_maps,
2754 })
2755 }
2756
2757 let diagnostic_endpoints = diagnostic_endpoints.into_iter().peekable();
2758 let chunks = text.chunks_in_range(range.clone());
2759
2760 BufferChunks {
2761 range,
2762 chunks,
2763 diagnostic_endpoints,
2764 error_depth: 0,
2765 warning_depth: 0,
2766 information_depth: 0,
2767 hint_depth: 0,
2768 unnecessary_depth: 0,
2769 highlights,
2770 }
2771 }
2772
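    /// Repositions the iterator at `offset`, discarding highlight captures
    /// that end at or before that offset and constraining the remaining
    /// captures to the new range.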
2773 pub fn seek(&mut self, offset: usize) {
2774 self.range.start = offset;
2775 self.chunks.seek(self.range.start);
2776 if let Some(highlights) = self.highlights.as_mut() {
2777 highlights
2778 .stack
2779 .retain(|(end_offset, _)| *end_offset > offset);
2780 if let Some(capture) = &highlights.next_capture {
2781 if offset >= capture.node.start_byte() {
2782 let next_capture_end = capture.node.end_byte();
2783 if offset < next_capture_end {
2784 highlights.stack.push((
2785 next_capture_end,
2786 highlights.highlight_maps[capture.grammar_index].get(capture.index),
2787 ));
2788 }
2789 highlights.next_capture.take();
2790 }
2791 }
2792 highlights.captures.set_byte_range(self.range.clone());
2793 }
2794 }
2795
2796 pub fn offset(&self) -> usize {
2797 self.range.start
2798 }
2799
2800 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
2801 let depth = match endpoint.severity {
2802 DiagnosticSeverity::ERROR => &mut self.error_depth,
2803 DiagnosticSeverity::WARNING => &mut self.warning_depth,
2804 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
2805 DiagnosticSeverity::HINT => &mut self.hint_depth,
2806 _ => return,
2807 };
2808 if endpoint.is_start {
2809 *depth += 1;
2810 } else {
2811 *depth -= 1;
2812 }
2813
2814 if endpoint.is_unnecessary {
2815 if endpoint.is_start {
2816 self.unnecessary_depth += 1;
2817 } else {
2818 self.unnecessary_depth -= 1;
2819 }
2820 }
2821 }
2822
2823 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
2824 if self.error_depth > 0 {
2825 Some(DiagnosticSeverity::ERROR)
2826 } else if self.warning_depth > 0 {
2827 Some(DiagnosticSeverity::WARNING)
2828 } else if self.information_depth > 0 {
2829 Some(DiagnosticSeverity::INFORMATION)
2830 } else if self.hint_depth > 0 {
2831 Some(DiagnosticSeverity::HINT)
2832 } else {
2833 None
2834 }
2835 }
2836
2837 fn current_code_is_unnecessary(&self) -> bool {
2838 self.unnecessary_depth > 0
2839 }
2840}
2841
2842impl<'a> Iterator for BufferChunks<'a> {
2843 type Item = Chunk<'a>;
2844
2845 fn next(&mut self) -> Option<Self::Item> {
2846 let mut next_capture_start = usize::MAX;
2847 let mut next_diagnostic_endpoint = usize::MAX;
2848
2849 if let Some(highlights) = self.highlights.as_mut() {
2850 while let Some((parent_capture_end, _)) = highlights.stack.last() {
2851 if *parent_capture_end <= self.range.start {
2852 highlights.stack.pop();
2853 } else {
2854 break;
2855 }
2856 }
2857
2858 if highlights.next_capture.is_none() {
2859 highlights.next_capture = highlights.captures.next();
2860 }
2861
2862 while let Some(capture) = highlights.next_capture.as_ref() {
2863 if self.range.start < capture.node.start_byte() {
2864 next_capture_start = capture.node.start_byte();
2865 break;
2866 } else {
2867 let highlight_id =
2868 highlights.highlight_maps[capture.grammar_index].get(capture.index);
2869 highlights
2870 .stack
2871 .push((capture.node.end_byte(), highlight_id));
2872 highlights.next_capture = highlights.captures.next();
2873 }
2874 }
2875 }
2876
2877 while let Some(endpoint) = self.diagnostic_endpoints.peek().copied() {
2878 if endpoint.offset <= self.range.start {
2879 self.update_diagnostic_depths(endpoint);
2880 self.diagnostic_endpoints.next();
2881 } else {
2882 next_diagnostic_endpoint = endpoint.offset;
2883 break;
2884 }
2885 }
2886
2887 if let Some(chunk) = self.chunks.peek() {
2888 let chunk_start = self.range.start;
2889 let mut chunk_end = (self.chunks.offset() + chunk.len())
2890 .min(next_capture_start)
2891 .min(next_diagnostic_endpoint);
2892 let mut highlight_id = None;
2893 if let Some(highlights) = self.highlights.as_ref() {
2894 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
2895 chunk_end = chunk_end.min(*parent_capture_end);
2896 highlight_id = Some(*parent_highlight_id);
2897 }
2898 }
2899
2900 let slice =
2901 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
2902 self.range.start = chunk_end;
2903 if self.range.start == self.chunks.offset() + chunk.len() {
2904 self.chunks.next().unwrap();
2905 }
2906
2907 Some(Chunk {
2908 text: slice,
2909 syntax_highlight_id: highlight_id,
2910 diagnostic_severity: self.current_diagnostic_severity(),
2911 is_unnecessary: self.current_code_is_unnecessary(),
2912 ..Default::default()
2913 })
2914 } else {
2915 None
2916 }
2917 }
2918}
2919
2920impl operation_queue::Operation for Operation {
2921 fn lamport_timestamp(&self) -> clock::Lamport {
2922 match self {
2923 Operation::Buffer(_) => {
2924 unreachable!("buffer operations should never be deferred at this layer")
2925 }
2926 Operation::UpdateDiagnostics {
2927 lamport_timestamp, ..
2928 }
2929 | Operation::UpdateSelections {
2930 lamport_timestamp, ..
2931 }
2932 | Operation::UpdateCompletionTriggers {
2933 lamport_timestamp, ..
2934 } => *lamport_timestamp,
2935 }
2936 }
2937}
2938
2939impl Default for Diagnostic {
2940 fn default() -> Self {
2941 Self {
2942 source: Default::default(),
2943 code: None,
2944 severity: DiagnosticSeverity::ERROR,
2945 message: Default::default(),
2946 group_id: 0,
2947 is_primary: false,
2948 is_valid: true,
2949 is_disk_based: false,
2950 is_unnecessary: false,
2951 }
2952 }
2953}
2954
2955impl IndentSize {
2956 pub fn spaces(len: u32) -> Self {
2957 Self {
2958 len,
2959 kind: IndentKind::Space,
2960 }
2961 }
2962
2963 pub fn tab() -> Self {
2964 Self {
2965 len: 1,
2966 kind: IndentKind::Tab,
2967 }
2968 }
2969
2970 pub fn chars(&self) -> impl Iterator<Item = char> {
2971 iter::repeat(self.char()).take(self.len as usize)
2972 }
2973
2974 pub fn char(&self) -> char {
2975 match self.kind {
2976 IndentKind::Space => ' ',
2977 IndentKind::Tab => '\t',
2978 }
2979 }
2980
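    /// Applies a one-level indentation change: `Ordering::Greater` adds `size`
    /// (or adopts it entirely when the current length is zero),
    /// `Ordering::Less` subtracts it when the kinds match and there is enough
    /// indentation to remove, and `Ordering::Equal` leaves the indent
    /// unchanged. For example, four spaces with a `Greater` delta of four
    /// spaces becomes eight spaces.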
2981 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
2982 match direction {
2983 Ordering::Less => {
2984 if self.kind == size.kind && self.len >= size.len {
2985 self.len -= size.len;
2986 }
2987 }
2988 Ordering::Equal => {}
2989 Ordering::Greater => {
2990 if self.len == 0 {
2991 self = size;
2992 } else if self.kind == size.kind {
2993 self.len += size.len;
2994 }
2995 }
2996 }
2997 self
2998 }
2999}
3000
3001impl Completion {
3002 pub fn sort_key(&self) -> (usize, &str) {
3003 let kind_key = match self.lsp_completion.kind {
3004 Some(lsp2::CompletionItemKind::VARIABLE) => 0,
3005 _ => 1,
3006 };
3007 (kind_key, &self.label.text[self.label.filter_range.clone()])
3008 }
3009
3010 pub fn is_snippet(&self) -> bool {
3011 self.lsp_completion.insert_text_format == Some(lsp2::InsertTextFormat::SNIPPET)
3012 }
3013}
3014
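/// Groups a sequence of row numbers into ranges of consecutive values, capping
/// each range at `max_len` rows.
///
/// A minimal sketch of the behavior (illustrative only, assuming the function
/// is in scope):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 10).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// ```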
3015pub fn contiguous_ranges(
3016 values: impl Iterator<Item = u32>,
3017 max_len: usize,
3018) -> impl Iterator<Item = Range<u32>> {
3019 let mut values = values;
3020 let mut current_range: Option<Range<u32>> = None;
3021 std::iter::from_fn(move || loop {
3022 if let Some(value) = values.next() {
3023 if let Some(range) = &mut current_range {
3024 if value == range.end && range.len() < max_len {
3025 range.end += 1;
3026 continue;
3027 }
3028 }
3029
3030 let prev_range = current_range.clone();
3031 current_range = Some(value..(value + 1));
3032 if prev_range.is_some() {
3033 return prev_range;
3034 }
3035 } else {
3036 return current_range.take();
3037 }
3038 })
3039}
3040
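/// Classifies a character for word-boundary purposes: whitespace, word
/// (alphanumeric, underscore, or any additional word character defined by the
/// language scope), or punctuation.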
3041pub fn char_kind(scope: &Option<LanguageScope>, c: char) -> CharKind {
3042 if c.is_whitespace() {
3043 return CharKind::Whitespace;
3044 } else if c.is_alphanumeric() || c == '_' {
3045 return CharKind::Word;
3046 }
3047
3048 if let Some(scope) = scope {
3049 if let Some(characters) = scope.word_characters() {
3050 if characters.contains(&c) {
3051 return CharKind::Word;
3052 }
3053 }
3054 }
3055
3056 CharKind::Punctuation
3057}
3058
3059/// Find all of the ranges of whitespace that occur at the ends of lines
3060/// in the given rope.
3061///
3062/// This could also be done with a regex search, but this implementation
3063/// avoids copying text.
3064pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
3065 let mut ranges = Vec::new();
3066
3067 let mut offset = 0;
3068 let mut prev_chunk_trailing_whitespace_range = 0..0;
3069 for chunk in rope.chunks() {
3070 let mut prev_line_trailing_whitespace_range = 0..0;
3071 for (i, line) in chunk.split('\n').enumerate() {
3072 let line_end_offset = offset + line.len();
3073 let trimmed_line_len = line.trim_end_matches(|c| matches!(c, ' ' | '\t')).len();
3074 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
3075
3076 if i == 0 && trimmed_line_len == 0 {
3077 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
3078 }
3079 if !prev_line_trailing_whitespace_range.is_empty() {
3080 ranges.push(prev_line_trailing_whitespace_range);
3081 }
3082
3083 offset = line_end_offset + 1;
3084 prev_line_trailing_whitespace_range = trailing_whitespace_range;
3085 }
3086
3087 offset -= 1;
3088 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
3089 }
3090
3091 if !prev_chunk_trailing_whitespace_range.is_empty() {
3092 ranges.push(prev_chunk_trailing_whitespace_range);
3093 }
3094
3095 ranges
3096}