1use crate::{
2 DebugVariableCapture, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
3 TextObject, TreeSitterOptions,
4 diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
5 language_settings::{LanguageSettings, language_settings},
6 outline::OutlineItem,
7 syntax_map::{
8 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
9 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
10 },
11 task_context::RunnableRange,
12 text_diff::text_diff,
13};
14pub use crate::{
15 Grammar, Language, LanguageRegistry,
16 diagnostic_set::DiagnosticSet,
17 highlight_map::{HighlightId, HighlightMap},
18 proto,
19};
20use anyhow::{Context as _, Result, anyhow};
21use async_watch as watch;
22use clock::Lamport;
23pub use clock::ReplicaId;
24use collections::HashMap;
25use fs::MTime;
26use futures::channel::oneshot;
27use gpui::{
28 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
29 Task, TaskLabel, TextStyle,
30};
31use lsp::{LanguageServerId, NumberOrString};
32use parking_lot::Mutex;
33use schemars::JsonSchema;
34use serde::{Deserialize, Serialize};
35use serde_json::Value;
36use settings::WorktreeId;
37use smallvec::SmallVec;
38use smol::future::yield_now;
39use std::{
40 any::Any,
41 borrow::Cow,
42 cell::Cell,
43 cmp::{self, Ordering, Reverse},
44 collections::{BTreeMap, BTreeSet},
45 ffi::OsStr,
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::{Path, PathBuf},
52 rc,
53 sync::{Arc, LazyLock},
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, debug_panic, maybe};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
76#[derive(Debug)]
77pub struct DebugVariableRanges {
78 pub buffer_id: BufferId,
79 pub range: Range<usize>,
80}
81
82/// A label for the background task spawned by the buffer to compute
83/// a diff against the contents of its file.
84pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
85
/// Indicates whether a [`Buffer`] may be edited.
87#[derive(PartialEq, Clone, Copy, Debug)]
88pub enum Capability {
89 /// The buffer is a mutable replica.
90 ReadWrite,
91 /// The buffer is a read-only replica.
92 ReadOnly,
93}
94
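/// A zero-based row index within a buffer.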
95pub type BufferRow = u32;
96
97/// An in-memory representation of a source code file, including its text,
98/// syntax trees, git status, and diagnostics.
99pub struct Buffer {
100 text: TextBuffer,
101 branch_state: Option<BufferBranchState>,
102 /// Filesystem state, `None` when there is no path.
103 file: Option<Arc<dyn File>>,
104 /// The mtime of the file when this buffer was last loaded from
105 /// or saved to disk.
106 saved_mtime: Option<MTime>,
107 /// The version vector when this buffer was last loaded from
108 /// or saved to disk.
109 saved_version: clock::Global,
110 preview_version: clock::Global,
111 transaction_depth: usize,
112 was_dirty_before_starting_transaction: Option<bool>,
113 reload_task: Option<Task<Result<()>>>,
114 language: Option<Arc<Language>>,
115 autoindent_requests: Vec<Arc<AutoindentRequest>>,
116 pending_autoindent: Option<Task<()>>,
117 sync_parse_timeout: Duration,
118 syntax_map: Mutex<SyntaxMap>,
119 reparse: Option<Task<()>>,
120 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
121 non_text_state_update_count: usize,
122 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
123 remote_selections: TreeMap<ReplicaId, SelectionSet>,
124 diagnostics_timestamp: clock::Lamport,
125 completion_triggers: BTreeSet<String>,
126 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
127 completion_triggers_timestamp: clock::Lamport,
128 deferred_ops: OperationQueue<Operation>,
129 capability: Capability,
130 has_conflict: bool,
    /// Memoizes the result of `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
133 has_unsaved_edits: Cell<(clock::Global, bool)>,
134 change_bits: Vec<rc::Weak<Cell<bool>>>,
135 _subscriptions: Vec<gpui::Subscription>,
136}
137
138#[derive(Copy, Clone, Debug, PartialEq, Eq)]
139pub enum ParseStatus {
140 Idle,
141 Parsing,
142}
143
144struct BufferBranchState {
145 base_buffer: Entity<Buffer>,
146 merged_operations: Vec<Lamport>,
147}
148
149/// An immutable, cheaply cloneable representation of a fixed
150/// state of a buffer.
151pub struct BufferSnapshot {
152 pub text: text::BufferSnapshot,
153 pub(crate) syntax: SyntaxSnapshot,
154 file: Option<Arc<dyn File>>,
155 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
156 remote_selections: TreeMap<ReplicaId, SelectionSet>,
157 language: Option<Arc<Language>>,
158 non_text_state_update_count: usize,
159}
160
161/// The kind and amount of indentation in a particular line. For now,
162/// assumes that indentation is all the same character.
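///
/// For example, a four-space indent is represented as
/// `IndentSize { len: 4, kind: IndentKind::Space }`.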
163#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
164pub struct IndentSize {
165 /// The number of bytes that comprise the indentation.
166 pub len: u32,
167 /// The kind of whitespace used for indentation.
168 pub kind: IndentKind,
169}
170
171/// A whitespace character that's used for indentation.
172#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
173pub enum IndentKind {
174 /// An ASCII space character.
175 #[default]
176 Space,
177 /// An ASCII tab character.
178 Tab,
179}
180
181/// The shape of a selection cursor.
182#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
183#[serde(rename_all = "snake_case")]
184pub enum CursorShape {
185 /// A vertical bar
186 #[default]
187 Bar,
188 /// A block that surrounds the following character
189 Block,
190 /// An underline that runs along the following character
191 Underline,
192 /// A box drawn around the following character
193 Hollow,
194}
195
196#[derive(Clone, Debug)]
197struct SelectionSet {
198 line_mode: bool,
199 cursor_shape: CursorShape,
200 selections: Arc<[Selection<Anchor>]>,
201 lamport_timestamp: clock::Lamport,
202}
203
204/// A diagnostic associated with a certain range of a buffer.
205#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
206pub struct Diagnostic {
207 /// The name of the service that produced this diagnostic.
208 pub source: Option<String>,
209 /// A machine-readable code that identifies this diagnostic.
210 pub code: Option<NumberOrString>,
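    /// An optional URL where more information about this diagnostic's code can be found.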
211 pub code_description: Option<lsp::Url>,
212 /// Whether this diagnostic is a hint, warning, or error.
213 pub severity: DiagnosticSeverity,
214 /// The human-readable message associated with this diagnostic.
215 pub message: String,
    /// The human-readable message, in Markdown format, if available.
217 pub markdown: Option<String>,
218 /// An id that identifies the group to which this diagnostic belongs.
219 ///
220 /// When a language server produces a diagnostic with
221 /// one or more associated diagnostics, those diagnostics are all
222 /// assigned a single group ID.
223 pub group_id: usize,
224 /// Whether this diagnostic is the primary diagnostic for its group.
225 ///
226 /// In a given group, the primary diagnostic is the top-level diagnostic
227 /// returned by the language server. The non-primary diagnostics are the
228 /// associated diagnostics.
229 pub is_primary: bool,
230 /// Whether this diagnostic is considered to originate from an analysis of
231 /// files on disk, as opposed to any unsaved buffer contents. This is a
232 /// property of a given diagnostic source, and is configured for a given
233 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
234 /// for the language server.
235 pub is_disk_based: bool,
236 /// Whether this diagnostic marks unnecessary code.
237 pub is_unnecessary: bool,
    /// Data from the language server that produced this diagnostic. It is passed
    /// back to the server when code actions are requested for this diagnostic.
239 pub data: Option<Value>,
240}
241
242/// An operation used to synchronize this buffer with its other replicas.
243#[derive(Clone, Debug, PartialEq)]
244pub enum Operation {
245 /// A text operation.
246 Buffer(text::Operation),
247
248 /// An update to the buffer's diagnostics.
249 UpdateDiagnostics {
250 /// The id of the language server that produced the new diagnostics.
251 server_id: LanguageServerId,
252 /// The diagnostics.
253 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
254 /// The buffer's lamport timestamp.
255 lamport_timestamp: clock::Lamport,
256 },
257
258 /// An update to the most recent selections in this buffer.
259 UpdateSelections {
260 /// The selections.
261 selections: Arc<[Selection<Anchor>]>,
262 /// The buffer's lamport timestamp.
263 lamport_timestamp: clock::Lamport,
264 /// Whether the selections are in 'line mode'.
265 line_mode: bool,
266 /// The [`CursorShape`] associated with these selections.
267 cursor_shape: CursorShape,
268 },
269
270 /// An update to the characters that should trigger autocompletion
271 /// for this buffer.
272 UpdateCompletionTriggers {
273 /// The characters that trigger autocompletion.
274 triggers: Vec<String>,
275 /// The buffer's lamport timestamp.
276 lamport_timestamp: clock::Lamport,
277 /// The language server ID.
278 server_id: LanguageServerId,
279 },
280}
281
282/// An event that occurs in a buffer.
283#[derive(Clone, Debug, PartialEq)]
284pub enum BufferEvent {
285 /// The buffer was changed in a way that must be
286 /// propagated to its other replicas.
287 Operation {
288 operation: Operation,
289 is_local: bool,
290 },
291 /// The buffer was edited.
292 Edited,
293 /// The buffer's `dirty` bit changed.
294 DirtyChanged,
295 /// The buffer was saved.
296 Saved,
297 /// The buffer's file was changed on disk.
298 FileHandleChanged,
299 /// The buffer was reloaded.
300 Reloaded,
    /// The buffer needs to be reloaded from disk.
302 ReloadNeeded,
303 /// The buffer's language was changed.
304 LanguageChanged,
305 /// The buffer's syntax trees were updated.
306 Reparsed,
307 /// The buffer's diagnostics were updated.
308 DiagnosticsUpdated,
309 /// The buffer gained or lost editing capabilities.
310 CapabilityChanged,
311 /// The buffer was explicitly requested to close.
312 Closed,
313 /// The buffer was discarded when closing.
314 Discarded,
315}
316
317/// The file associated with a buffer.
318pub trait File: Send + Sync + Any {
319 /// Returns the [`LocalFile`] associated with this file, if the
320 /// file is local.
321 fn as_local(&self) -> Option<&dyn LocalFile>;
322
323 /// Returns whether this file is local.
324 fn is_local(&self) -> bool {
325 self.as_local().is_some()
326 }
327
328 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
329 /// only available in some states, such as modification time.
330 fn disk_state(&self) -> DiskState;
331
332 /// Returns the path of this file relative to the worktree's root directory.
333 fn path(&self) -> &Arc<Path>;
334
335 /// Returns the path of this file relative to the worktree's parent directory (this means it
336 /// includes the name of the worktree's root folder).
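    ///
    /// For example, for a worktree rooted at a folder named `zed` containing
    /// `src/main.rs`, [`File::path`] returns `src/main.rs` while this method
    /// returns `zed/src/main.rs`.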
337 fn full_path(&self, cx: &App) -> PathBuf;
338
339 /// Returns the last component of this handle's absolute path. If this handle refers to the root
340 /// of its worktree, then this method will return the name of the worktree itself.
341 fn file_name<'a>(&'a self, cx: &'a App) -> &'a OsStr;
342
343 /// Returns the id of the worktree to which this file belongs.
344 ///
345 /// This is needed for looking up project-specific settings.
346 fn worktree_id(&self, cx: &App) -> WorktreeId;
347
348 /// Converts this file into a protobuf message.
349 fn to_proto(&self, cx: &App) -> rpc::proto::File;
350
    /// Returns whether Zed considers this to be a private file.
352 fn is_private(&self) -> bool;
353}
354
/// The file's storage status: whether it is stored (`Present`) and, if so, when it was last
/// modified. When the file is not stored, it is either `New` or `Deleted`. The UI distinguishes
/// these two states; for example, the buffer tab does not display a deletion indicator for
/// newly created files.
359#[derive(Copy, Clone, Debug, PartialEq)]
360pub enum DiskState {
361 /// File created in Zed that has not been saved.
362 New,
363 /// File present on the filesystem.
364 Present { mtime: MTime },
365 /// Deleted file that was previously present.
366 Deleted,
367}
368
369impl DiskState {
370 /// Returns the file's last known modification time on disk.
371 pub fn mtime(self) -> Option<MTime> {
372 match self {
373 DiskState::New => None,
374 DiskState::Present { mtime } => Some(mtime),
375 DiskState::Deleted => None,
376 }
377 }
378
379 pub fn exists(&self) -> bool {
380 match self {
381 DiskState::New => false,
382 DiskState::Present { .. } => true,
383 DiskState::Deleted => false,
384 }
385 }
386}
387
388/// The file associated with a buffer, in the case where the file is on the local disk.
389pub trait LocalFile: File {
    /// Returns the absolute path of this file.
391 fn abs_path(&self, cx: &App) -> PathBuf;
392
393 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
394 fn load(&self, cx: &App) -> Task<Result<String>>;
395
396 /// Loads the file's contents from disk.
397 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
398}
399
400/// The auto-indent behavior associated with an editing operation.
401/// For some editing operations, each affected line of text has its
402/// indentation recomputed. For other operations, the entire block
403/// of edited text is adjusted uniformly.
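///
/// # Example
///
/// A rough sketch of pasting previously copied text with block auto-indent;
/// `insert_at` and `copied_text` are illustrative placeholders:
///
/// ```ignore
/// buffer.edit(
///     [(insert_at..insert_at, copied_text)],
///     Some(AutoindentMode::Block {
///         original_indent_columns: vec![Some(4)],
///     }),
///     cx,
/// );
/// ```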
404#[derive(Clone, Debug)]
405pub enum AutoindentMode {
406 /// Indent each line of inserted text.
407 EachLine,
408 /// Apply the same indentation adjustment to all of the lines
409 /// in a given insertion.
410 Block {
411 /// The original indentation column of the first line of each
412 /// insertion, if it has been copied.
413 ///
414 /// Knowing this makes it possible to preserve the relative indentation
415 /// of every line in the insertion from when it was copied.
416 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion has its indentation adjusted by `b - a`.
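        ///
        /// For example, if the copied block's first line was originally at
        /// column 4 (`a = 4`) and it is auto-indented to column 8 (`b = 8`),
        /// every other line of the block is shifted right by 4 columns.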
420 original_indent_columns: Vec<Option<u32>>,
421 },
422}
423
424#[derive(Clone)]
425struct AutoindentRequest {
426 before_edit: BufferSnapshot,
427 entries: Vec<AutoindentRequestEntry>,
428 is_block_mode: bool,
429 ignore_empty_lines: bool,
430}
431
432#[derive(Debug, Clone)]
433struct AutoindentRequestEntry {
434 /// A range of the buffer whose indentation should be adjusted.
435 range: Range<Anchor>,
436 /// Whether or not these lines should be considered brand new, for the
437 /// purpose of auto-indent. When text is not new, its indentation will
438 /// only be adjusted if the suggested indentation level has *changed*
439 /// since the edit was made.
440 first_line_is_new: bool,
441 indent_size: IndentSize,
442 original_indent_column: Option<u32>,
443}
444
445#[derive(Debug)]
446struct IndentSuggestion {
447 basis_row: u32,
448 delta: Ordering,
449 within_error: bool,
450}
451
452struct BufferChunkHighlights<'a> {
453 captures: SyntaxMapCaptures<'a>,
454 next_capture: Option<SyntaxMapCapture<'a>>,
455 stack: Vec<(usize, HighlightId)>,
456 highlight_maps: Vec<HighlightMap>,
457}
458
459/// An iterator that yields chunks of a buffer's text, along with their
460/// syntax highlights and diagnostic status.
461pub struct BufferChunks<'a> {
462 buffer_snapshot: Option<&'a BufferSnapshot>,
463 range: Range<usize>,
464 chunks: text::Chunks<'a>,
465 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
466 error_depth: usize,
467 warning_depth: usize,
468 information_depth: usize,
469 hint_depth: usize,
470 unnecessary_depth: usize,
471 highlights: Option<BufferChunkHighlights<'a>>,
472}
473
474/// A chunk of a buffer's text, along with its syntax highlight and
475/// diagnostic status.
476#[derive(Clone, Debug, Default)]
477pub struct Chunk<'a> {
478 /// The text of the chunk.
479 pub text: &'a str,
480 /// The syntax highlighting style of the chunk.
481 pub syntax_highlight_id: Option<HighlightId>,
482 /// The highlight style that has been applied to this chunk in
483 /// the editor.
484 pub highlight_style: Option<HighlightStyle>,
    /// The severity of the diagnostic associated with this chunk, if any.
486 pub diagnostic_severity: Option<DiagnosticSeverity>,
487 /// Whether this chunk of text is marked as unnecessary.
488 pub is_unnecessary: bool,
489 /// Whether this chunk of text was originally a tab character.
490 pub is_tab: bool,
491}
492
493/// A set of edits to a given version of a buffer, computed asynchronously.
494#[derive(Debug)]
495pub struct Diff {
496 pub base_version: clock::Global,
497 pub line_ending: LineEnding,
498 pub edits: Vec<(Range<usize>, Arc<str>)>,
499}
500
501#[derive(Clone, Copy)]
502pub(crate) struct DiagnosticEndpoint {
503 offset: usize,
504 is_start: bool,
505 severity: DiagnosticSeverity,
506 is_unnecessary: bool,
507}
508
509/// A class of characters, used for characterizing a run of text.
510#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
511pub enum CharKind {
512 /// Whitespace.
513 Whitespace,
514 /// Punctuation.
515 Punctuation,
516 /// Word.
517 Word,
518}
519
/// A set of data about a buffer region that can be resolved into a task.
521pub struct Runnable {
522 pub tags: SmallVec<[RunnableTag; 1]>,
523 pub language: Arc<Language>,
524 pub buffer: BufferId,
525}
526
527#[derive(Default, Clone, Debug)]
528pub struct HighlightedText {
529 pub text: SharedString,
530 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
531}
532
533#[derive(Default, Debug)]
534struct HighlightedTextBuilder {
535 pub text: String,
536 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
537}
538
539impl HighlightedText {
540 pub fn from_buffer_range<T: ToOffset>(
541 range: Range<T>,
542 snapshot: &text::BufferSnapshot,
543 syntax_snapshot: &SyntaxSnapshot,
544 override_style: Option<HighlightStyle>,
545 syntax_theme: &SyntaxTheme,
546 ) -> Self {
547 let mut highlighted_text = HighlightedTextBuilder::default();
548 highlighted_text.add_text_from_buffer_range(
549 range,
550 snapshot,
551 syntax_snapshot,
552 override_style,
553 syntax_theme,
554 );
555 highlighted_text.build()
556 }
557
558 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
559 gpui::StyledText::new(self.text.clone())
560 .with_default_highlights(default_style, self.highlights.iter().cloned())
561 }
562
    /// Returns the first line, with leading whitespace trimmed (but never past
    /// the start of the first highlight), and a boolean indicating whether any
    /// lines follow it.
565 pub fn first_line_preview(self) -> (Self, bool) {
566 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
567 let first_line = &self.text[..newline_ix];
568
569 // Trim leading whitespace, unless an edit starts prior to it.
570 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
571 if let Some((first_highlight_range, _)) = self.highlights.first() {
572 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
573 }
574
575 let preview_text = &first_line[preview_start_ix..];
576 let preview_highlights = self
577 .highlights
578 .into_iter()
579 .take_while(|(range, _)| range.start < newline_ix)
580 .filter_map(|(mut range, highlight)| {
581 range.start = range.start.saturating_sub(preview_start_ix);
582 range.end = range.end.saturating_sub(preview_start_ix).min(newline_ix);
583 if range.is_empty() {
584 None
585 } else {
586 Some((range, highlight))
587 }
588 });
589
590 let preview = Self {
591 text: SharedString::new(preview_text),
592 highlights: preview_highlights.collect(),
593 };
594
595 (preview, self.text.len() > newline_ix)
596 }
597}
598
599impl HighlightedTextBuilder {
600 pub fn build(self) -> HighlightedText {
601 HighlightedText {
602 text: self.text.into(),
603 highlights: self.highlights,
604 }
605 }
606
607 pub fn add_text_from_buffer_range<T: ToOffset>(
608 &mut self,
609 range: Range<T>,
610 snapshot: &text::BufferSnapshot,
611 syntax_snapshot: &SyntaxSnapshot,
612 override_style: Option<HighlightStyle>,
613 syntax_theme: &SyntaxTheme,
614 ) {
615 let range = range.to_offset(snapshot);
616 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
617 let start = self.text.len();
618 self.text.push_str(chunk.text);
619 let end = self.text.len();
620
621 if let Some(mut highlight_style) = chunk
622 .syntax_highlight_id
623 .and_then(|id| id.style(syntax_theme))
624 {
625 if let Some(override_style) = override_style {
626 highlight_style.highlight(override_style);
627 }
628 self.highlights.push((start..end, highlight_style));
629 } else if let Some(override_style) = override_style {
630 self.highlights.push((start..end, override_style));
631 }
632 }
633 }
634
635 fn highlighted_chunks<'a>(
636 range: Range<usize>,
637 snapshot: &'a text::BufferSnapshot,
638 syntax_snapshot: &'a SyntaxSnapshot,
639 ) -> BufferChunks<'a> {
640 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
641 grammar.highlights_query.as_ref()
642 });
643
644 let highlight_maps = captures
645 .grammars()
646 .iter()
647 .map(|grammar| grammar.highlight_map())
648 .collect();
649
650 BufferChunks::new(
651 snapshot.as_rope(),
652 range,
653 Some((captures, highlight_maps)),
654 false,
655 None,
656 )
657 }
658}
659
660#[derive(Clone)]
661pub struct EditPreview {
662 old_snapshot: text::BufferSnapshot,
663 applied_edits_snapshot: text::BufferSnapshot,
664 syntax_snapshot: SyntaxSnapshot,
665}
666
667impl EditPreview {
668 pub fn highlight_edits(
669 &self,
670 current_snapshot: &BufferSnapshot,
671 edits: &[(Range<Anchor>, String)],
672 include_deletions: bool,
673 cx: &App,
674 ) -> HighlightedText {
675 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
676 return HighlightedText::default();
677 };
678
679 let mut highlighted_text = HighlightedTextBuilder::default();
680
681 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
682
683 let insertion_highlight_style = HighlightStyle {
684 background_color: Some(cx.theme().status().created_background),
685 ..Default::default()
686 };
687 let deletion_highlight_style = HighlightStyle {
688 background_color: Some(cx.theme().status().deleted_background),
689 ..Default::default()
690 };
691 let syntax_theme = cx.theme().syntax();
692
693 for (range, edit_text) in edits {
694 let edit_new_end_in_preview_snapshot = range
695 .end
696 .bias_right(&self.old_snapshot)
697 .to_offset(&self.applied_edits_snapshot);
698 let edit_start_in_preview_snapshot = edit_new_end_in_preview_snapshot - edit_text.len();
699
700 let unchanged_range_in_preview_snapshot =
701 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
702 if !unchanged_range_in_preview_snapshot.is_empty() {
703 highlighted_text.add_text_from_buffer_range(
704 unchanged_range_in_preview_snapshot,
705 &self.applied_edits_snapshot,
706 &self.syntax_snapshot,
707 None,
708 &syntax_theme,
709 );
710 }
711
712 let range_in_current_snapshot = range.to_offset(current_snapshot);
713 if include_deletions && !range_in_current_snapshot.is_empty() {
714 highlighted_text.add_text_from_buffer_range(
715 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
718 Some(deletion_highlight_style),
719 &syntax_theme,
720 );
721 }
722
723 if !edit_text.is_empty() {
724 highlighted_text.add_text_from_buffer_range(
725 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
726 &self.applied_edits_snapshot,
727 &self.syntax_snapshot,
728 Some(insertion_highlight_style),
729 &syntax_theme,
730 );
731 }
732
733 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
734 }
735
736 highlighted_text.add_text_from_buffer_range(
737 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
738 &self.applied_edits_snapshot,
739 &self.syntax_snapshot,
740 None,
741 &syntax_theme,
742 );
743
744 highlighted_text.build()
745 }
746
747 fn compute_visible_range(&self, edits: &[(Range<Anchor>, String)]) -> Option<Range<usize>> {
748 let (first, _) = edits.first()?;
749 let (last, _) = edits.last()?;
750
751 let start = first
752 .start
753 .bias_left(&self.old_snapshot)
754 .to_point(&self.applied_edits_snapshot);
755 let end = last
756 .end
757 .bias_right(&self.old_snapshot)
758 .to_point(&self.applied_edits_snapshot);
759
760 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
761 let range = Point::new(start.row, 0)
762 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
763
764 Some(range.to_offset(&self.applied_edits_snapshot))
765 }
766}
767
768#[derive(Clone, Debug, PartialEq, Eq)]
769pub struct BracketMatch {
770 pub open_range: Range<usize>,
771 pub close_range: Range<usize>,
772 pub newline_only: bool,
773}
774
775impl Buffer {
776 /// Create a new buffer with the given base text.
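    ///
    /// A minimal sketch of constructing a buffer inside an entity context
    /// (assumes a GPUI `cx` that can create entities):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
    /// ```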
777 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
778 Self::build(
779 TextBuffer::new(0, cx.entity_id().as_non_zero_u64().into(), base_text.into()),
780 None,
781 Capability::ReadWrite,
782 )
783 }
784
785 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
786 pub fn local_normalized(
787 base_text_normalized: Rope,
788 line_ending: LineEnding,
789 cx: &Context<Self>,
790 ) -> Self {
791 Self::build(
792 TextBuffer::new_normalized(
793 0,
794 cx.entity_id().as_non_zero_u64().into(),
795 line_ending,
796 base_text_normalized,
797 ),
798 None,
799 Capability::ReadWrite,
800 )
801 }
802
803 /// Create a new buffer that is a replica of a remote buffer.
804 pub fn remote(
805 remote_id: BufferId,
806 replica_id: ReplicaId,
807 capability: Capability,
808 base_text: impl Into<String>,
809 ) -> Self {
810 Self::build(
811 TextBuffer::new(replica_id, remote_id, base_text.into()),
812 None,
813 capability,
814 )
815 }
816
817 /// Create a new buffer that is a replica of a remote buffer, populating its
818 /// state from the given protobuf message.
819 pub fn from_proto(
820 replica_id: ReplicaId,
821 capability: Capability,
822 message: proto::BufferState,
823 file: Option<Arc<dyn File>>,
824 ) -> Result<Self> {
825 let buffer_id = BufferId::new(message.id)
826 .with_context(|| anyhow!("Could not deserialize buffer_id"))?;
827 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
828 let mut this = Self::build(buffer, file, capability);
829 this.text.set_line_ending(proto::deserialize_line_ending(
830 rpc::proto::LineEnding::from_i32(message.line_ending)
831 .ok_or_else(|| anyhow!("missing line_ending"))?,
832 ));
833 this.saved_version = proto::deserialize_version(&message.saved_version);
834 this.saved_mtime = message.saved_mtime.map(|time| time.into());
835 Ok(this)
836 }
837
838 /// Serialize the buffer's state to a protobuf message.
839 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
840 proto::BufferState {
841 id: self.remote_id().into(),
842 file: self.file.as_ref().map(|f| f.to_proto(cx)),
843 base_text: self.base_text().to_string(),
844 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
845 saved_version: proto::serialize_version(&self.saved_version),
846 saved_mtime: self.saved_mtime.map(|time| time.into()),
847 }
848 }
849
850 /// Serialize as protobufs all of the changes to the buffer since the given version.
851 pub fn serialize_ops(
852 &self,
853 since: Option<clock::Global>,
854 cx: &App,
855 ) -> Task<Vec<proto::Operation>> {
856 let mut operations = Vec::new();
857 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
858
859 operations.extend(self.remote_selections.iter().map(|(_, set)| {
860 proto::serialize_operation(&Operation::UpdateSelections {
861 selections: set.selections.clone(),
862 lamport_timestamp: set.lamport_timestamp,
863 line_mode: set.line_mode,
864 cursor_shape: set.cursor_shape,
865 })
866 }));
867
868 for (server_id, diagnostics) in &self.diagnostics {
869 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
870 lamport_timestamp: self.diagnostics_timestamp,
871 server_id: *server_id,
872 diagnostics: diagnostics.iter().cloned().collect(),
873 }));
874 }
875
876 for (server_id, completions) in &self.completion_triggers_per_language_server {
877 operations.push(proto::serialize_operation(
878 &Operation::UpdateCompletionTriggers {
879 triggers: completions.iter().cloned().collect(),
880 lamport_timestamp: self.completion_triggers_timestamp,
881 server_id: *server_id,
882 },
883 ));
884 }
885
886 let text_operations = self.text.operations().clone();
887 cx.background_spawn(async move {
888 let since = since.unwrap_or_default();
889 operations.extend(
890 text_operations
891 .iter()
892 .filter(|(_, op)| !since.observed(op.timestamp()))
893 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
894 );
895 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
896 operations
897 })
898 }
899
900 /// Assign a language to the buffer, returning the buffer.
901 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
902 self.set_language(Some(language), cx);
903 self
904 }
905
906 /// Returns the [`Capability`] of this buffer.
907 pub fn capability(&self) -> Capability {
908 self.capability
909 }
910
911 /// Whether this buffer can only be read.
912 pub fn read_only(&self) -> bool {
913 self.capability == Capability::ReadOnly
914 }
915
916 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
917 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
918 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
919 let snapshot = buffer.snapshot();
920 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
921 Self {
922 saved_mtime,
923 saved_version: buffer.version(),
924 preview_version: buffer.version(),
925 reload_task: None,
926 transaction_depth: 0,
927 was_dirty_before_starting_transaction: None,
928 has_unsaved_edits: Cell::new((buffer.version(), false)),
929 text: buffer,
930 branch_state: None,
931 file,
932 capability,
933 syntax_map,
934 reparse: None,
935 non_text_state_update_count: 0,
936 sync_parse_timeout: Duration::from_millis(1),
937 parse_status: async_watch::channel(ParseStatus::Idle),
938 autoindent_requests: Default::default(),
939 pending_autoindent: Default::default(),
940 language: None,
941 remote_selections: Default::default(),
942 diagnostics: Default::default(),
943 diagnostics_timestamp: Default::default(),
944 completion_triggers: Default::default(),
945 completion_triggers_per_language_server: Default::default(),
946 completion_triggers_timestamp: Default::default(),
947 deferred_ops: OperationQueue::new(),
948 has_conflict: false,
949 change_bits: Default::default(),
950 _subscriptions: Vec::new(),
951 }
952 }
953
954 pub fn build_snapshot(
955 text: Rope,
956 language: Option<Arc<Language>>,
957 language_registry: Option<Arc<LanguageRegistry>>,
958 cx: &mut App,
959 ) -> impl Future<Output = BufferSnapshot> + use<> {
960 let entity_id = cx.reserve_entity::<Self>().entity_id();
961 let buffer_id = entity_id.as_non_zero_u64().into();
962 async move {
963 let text =
964 TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
965 let mut syntax = SyntaxMap::new(&text).snapshot();
966 if let Some(language) = language.clone() {
967 let text = text.clone();
968 let language = language.clone();
969 let language_registry = language_registry.clone();
970 syntax.reparse(&text, language_registry, language);
971 }
972 BufferSnapshot {
973 text,
974 syntax,
975 file: None,
976 diagnostics: Default::default(),
977 remote_selections: Default::default(),
978 language,
979 non_text_state_update_count: 0,
980 }
981 }
982 }
983
984 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
985 let entity_id = cx.reserve_entity::<Self>().entity_id();
986 let buffer_id = entity_id.as_non_zero_u64().into();
987 let text =
988 TextBuffer::new_normalized(0, buffer_id, Default::default(), Rope::new()).snapshot();
989 let syntax = SyntaxMap::new(&text).snapshot();
990 BufferSnapshot {
991 text,
992 syntax,
993 file: None,
994 diagnostics: Default::default(),
995 remote_selections: Default::default(),
996 language: None,
997 non_text_state_update_count: 0,
998 }
999 }
1000
1001 #[cfg(any(test, feature = "test-support"))]
1002 pub fn build_snapshot_sync(
1003 text: Rope,
1004 language: Option<Arc<Language>>,
1005 language_registry: Option<Arc<LanguageRegistry>>,
1006 cx: &mut App,
1007 ) -> BufferSnapshot {
1008 let entity_id = cx.reserve_entity::<Self>().entity_id();
1009 let buffer_id = entity_id.as_non_zero_u64().into();
1010 let text = TextBuffer::new_normalized(0, buffer_id, Default::default(), text).snapshot();
1011 let mut syntax = SyntaxMap::new(&text).snapshot();
1012 if let Some(language) = language.clone() {
1013 let text = text.clone();
1014 let language = language.clone();
1015 let language_registry = language_registry.clone();
1016 syntax.reparse(&text, language_registry, language);
1017 }
1018 BufferSnapshot {
1019 text,
1020 syntax,
1021 file: None,
1022 diagnostics: Default::default(),
1023 remote_selections: Default::default(),
1024 language,
1025 non_text_state_update_count: 0,
1026 }
1027 }
1028
1029 /// Retrieve a snapshot of the buffer's current state. This is computationally
1030 /// cheap, and allows reading from the buffer on a background thread.
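    ///
    /// A sketch of reading buffer contents off the main thread via a snapshot
    /// (assumes a `buffer: Entity<Buffer>` and a GPUI `cx`):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // The snapshot is immutable, so it can be queried freely here.
    ///     let line_count = snapshot.max_point().row + 1;
    /// })
    /// .detach();
    /// ```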
1031 pub fn snapshot(&self) -> BufferSnapshot {
1032 let text = self.text.snapshot();
1033 let mut syntax_map = self.syntax_map.lock();
1034 syntax_map.interpolate(&text);
1035 let syntax = syntax_map.snapshot();
1036
1037 BufferSnapshot {
1038 text,
1039 syntax,
1040 file: self.file.clone(),
1041 remote_selections: self.remote_selections.clone(),
1042 diagnostics: self.diagnostics.clone(),
1043 language: self.language.clone(),
1044 non_text_state_update_count: self.non_text_state_update_count,
1045 }
1046 }
1047
1048 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1049 let this = cx.entity();
1050 cx.new(|cx| {
1051 let mut branch = Self {
1052 branch_state: Some(BufferBranchState {
1053 base_buffer: this.clone(),
1054 merged_operations: Default::default(),
1055 }),
1056 language: self.language.clone(),
1057 has_conflict: self.has_conflict,
1058 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1059 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1060 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1061 };
1062 if let Some(language_registry) = self.language_registry() {
1063 branch.set_language_registry(language_registry);
1064 }
1065
1066 // Reparse the branch buffer so that we get syntax highlighting immediately.
1067 branch.reparse(cx);
1068
1069 branch
1070 })
1071 }
1072
1073 pub fn preview_edits(
1074 &self,
1075 edits: Arc<[(Range<Anchor>, String)]>,
1076 cx: &App,
1077 ) -> Task<EditPreview> {
1078 let registry = self.language_registry();
1079 let language = self.language().cloned();
1080 let old_snapshot = self.text.snapshot();
1081 let mut branch_buffer = self.text.branch();
1082 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1083 cx.background_spawn(async move {
1084 if !edits.is_empty() {
1085 if let Some(language) = language.clone() {
1086 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1087 }
1088
1089 branch_buffer.edit(edits.iter().cloned());
1090 let snapshot = branch_buffer.snapshot();
1091 syntax_snapshot.interpolate(&snapshot);
1092
1093 if let Some(language) = language {
1094 syntax_snapshot.reparse(&snapshot, registry, language);
1095 }
1096 }
1097 EditPreview {
1098 old_snapshot,
1099 applied_edits_snapshot: branch_buffer.snapshot(),
1100 syntax_snapshot,
1101 }
1102 })
1103 }
1104
1105 /// Applies all of the changes in this buffer that intersect any of the
1106 /// given `ranges` to its base buffer.
1107 ///
1108 /// If `ranges` is empty, then all changes will be applied. This buffer must
1109 /// be a branch buffer to call this method.
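    ///
    /// A rough sketch of editing a branch and folding the change back into its
    /// base buffer (the edit itself is illustrative):
    ///
    /// ```ignore
    /// let branch = base.update(cx, |base, cx| base.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "// header\n")], None, cx);
    ///     // An empty `ranges` vector applies every branch edit to the base.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```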
1110 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1111 let Some(base_buffer) = self.base_buffer() else {
1112 debug_panic!("not a branch buffer");
1113 return;
1114 };
1115
1116 let mut ranges = if ranges.is_empty() {
1117 &[0..usize::MAX]
1118 } else {
1119 ranges.as_slice()
1120 }
1121 .into_iter()
1122 .peekable();
1123
1124 let mut edits = Vec::new();
1125 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1126 let mut is_included = false;
1127 while let Some(range) = ranges.peek() {
1128 if range.end < edit.new.start {
1129 ranges.next().unwrap();
1130 } else {
1131 if range.start <= edit.new.end {
1132 is_included = true;
1133 }
1134 break;
1135 }
1136 }
1137
1138 if is_included {
1139 edits.push((
1140 edit.old.clone(),
1141 self.text_for_range(edit.new.clone()).collect::<String>(),
1142 ));
1143 }
1144 }
1145
1146 let operation = base_buffer.update(cx, |base_buffer, cx| {
1147 // cx.emit(BufferEvent::DiffBaseChanged);
1148 base_buffer.edit(edits, None, cx)
1149 });
1150
1151 if let Some(operation) = operation {
1152 if let Some(BufferBranchState {
1153 merged_operations, ..
1154 }) = &mut self.branch_state
1155 {
1156 merged_operations.push(operation);
1157 }
1158 }
1159 }
1160
1161 fn on_base_buffer_event(
1162 &mut self,
1163 _: Entity<Buffer>,
1164 event: &BufferEvent,
1165 cx: &mut Context<Self>,
1166 ) {
1167 let BufferEvent::Operation { operation, .. } = event else {
1168 return;
1169 };
1170 let Some(BufferBranchState {
1171 merged_operations, ..
1172 }) = &mut self.branch_state
1173 else {
1174 return;
1175 };
1176
1177 let mut operation_to_undo = None;
1178 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation {
1179 if let Ok(ix) = merged_operations.binary_search(&operation.timestamp) {
1180 merged_operations.remove(ix);
1181 operation_to_undo = Some(operation.timestamp);
1182 }
1183 }
1184
1185 self.apply_ops([operation.clone()], cx);
1186
1187 if let Some(timestamp) = operation_to_undo {
1188 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1189 self.undo_operations(counts, cx);
1190 }
1191 }
1192
1193 #[cfg(test)]
1194 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1195 &self.text
1196 }
1197
1198 /// Retrieve a snapshot of the buffer's raw text, without any
1199 /// language-related state like the syntax tree or diagnostics.
1200 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1201 self.text.snapshot()
1202 }
1203
1204 /// The file associated with the buffer, if any.
1205 pub fn file(&self) -> Option<&Arc<dyn File>> {
1206 self.file.as_ref()
1207 }
1208
1209 /// The version of the buffer that was last saved or reloaded from disk.
1210 pub fn saved_version(&self) -> &clock::Global {
1211 &self.saved_version
1212 }
1213
1214 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1215 pub fn saved_mtime(&self) -> Option<MTime> {
1216 self.saved_mtime
1217 }
1218
1219 /// Assign a language to the buffer.
1220 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1221 self.non_text_state_update_count += 1;
1222 self.syntax_map.lock().clear(&self.text);
1223 self.language = language;
1224 self.was_changed();
1225 self.reparse(cx);
1226 cx.emit(BufferEvent::LanguageChanged);
1227 }
1228
    /// Assign a language registry to the buffer. This allows the buffer to look up
    /// other languages when parts of its contents are written in different languages.
1231 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1232 self.syntax_map
1233 .lock()
1234 .set_language_registry(language_registry);
1235 }
1236
1237 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1238 self.syntax_map.lock().language_registry()
1239 }
1240
1241 /// Assign the buffer a new [`Capability`].
1242 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1243 self.capability = capability;
1244 cx.emit(BufferEvent::CapabilityChanged)
1245 }
1246
1247 /// This method is called to signal that the buffer has been saved.
1248 pub fn did_save(
1249 &mut self,
1250 version: clock::Global,
1251 mtime: Option<MTime>,
1252 cx: &mut Context<Self>,
1253 ) {
1254 self.saved_version = version;
1255 self.has_unsaved_edits
1256 .set((self.saved_version().clone(), false));
1257 self.has_conflict = false;
1258 self.saved_mtime = mtime;
1259 self.was_changed();
1260 cx.emit(BufferEvent::Saved);
1261 cx.notify();
1262 }
1263
1264 /// This method is called to signal that the buffer has been discarded.
1265 pub fn discarded(&self, cx: &mut Context<Self>) {
1266 cx.emit(BufferEvent::Discarded);
1267 cx.notify();
1268 }
1269
1270 /// Reloads the contents of the buffer from disk.
1271 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1272 let (tx, rx) = futures::channel::oneshot::channel();
1273 let prev_version = self.text.version();
1274 self.reload_task = Some(cx.spawn(async move |this, cx| {
1275 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1276 let file = this.file.as_ref()?.as_local()?;
1277
1278 Some((file.disk_state().mtime(), file.load(cx)))
1279 })?
1280 else {
1281 return Ok(());
1282 };
1283
1284 let new_text = new_text.await?;
1285 let diff = this
1286 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1287 .await;
1288 this.update(cx, |this, cx| {
1289 if this.version() == diff.base_version {
1290 this.finalize_last_transaction();
1291 this.apply_diff(diff, cx);
1292 tx.send(this.finalize_last_transaction().cloned()).ok();
1293 this.has_conflict = false;
1294 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1295 } else {
1296 if !diff.edits.is_empty()
1297 || this
1298 .edits_since::<usize>(&diff.base_version)
1299 .next()
1300 .is_some()
1301 {
1302 this.has_conflict = true;
1303 }
1304
1305 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1306 }
1307
1308 this.reload_task.take();
1309 })
1310 }));
1311 rx
1312 }
1313
1314 /// This method is called to signal that the buffer has been reloaded.
1315 pub fn did_reload(
1316 &mut self,
1317 version: clock::Global,
1318 line_ending: LineEnding,
1319 mtime: Option<MTime>,
1320 cx: &mut Context<Self>,
1321 ) {
1322 self.saved_version = version;
1323 self.has_unsaved_edits
1324 .set((self.saved_version.clone(), false));
1325 self.text.set_line_ending(line_ending);
1326 self.saved_mtime = mtime;
1327 cx.emit(BufferEvent::Reloaded);
1328 cx.notify();
1329 }
1330
1331 /// Updates the [`File`] backing this buffer. This should be called when
1332 /// the file has changed or has been deleted.
1333 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1334 let was_dirty = self.is_dirty();
1335 let mut file_changed = false;
1336
1337 if let Some(old_file) = self.file.as_ref() {
1338 if new_file.path() != old_file.path() {
1339 file_changed = true;
1340 }
1341
1342 let old_state = old_file.disk_state();
1343 let new_state = new_file.disk_state();
1344 if old_state != new_state {
1345 file_changed = true;
1346 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1347 cx.emit(BufferEvent::ReloadNeeded)
1348 }
1349 }
1350 } else {
1351 file_changed = true;
1352 };
1353
1354 self.file = Some(new_file);
1355 if file_changed {
1356 self.was_changed();
1357 self.non_text_state_update_count += 1;
1358 if was_dirty != self.is_dirty() {
1359 cx.emit(BufferEvent::DirtyChanged);
1360 }
1361 cx.emit(BufferEvent::FileHandleChanged);
1362 cx.notify();
1363 }
1364 }
1365
1366 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1367 Some(self.branch_state.as_ref()?.base_buffer.clone())
1368 }
1369
1370 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1371 pub fn language(&self) -> Option<&Arc<Language>> {
1372 self.language.as_ref()
1373 }
1374
1375 /// Returns the [`Language`] at the given location.
1376 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1377 let offset = position.to_offset(self);
1378 self.syntax_map
1379 .lock()
1380 .layers_for_range(offset..offset, &self.text, false)
1381 .last()
1382 .map(|info| info.language.clone())
1383 .or_else(|| self.language.clone())
1384 }
1385
1386 /// Returns each [`Language`] for the active syntax layers at the given location.
1387 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1388 let offset = position.to_offset(self);
1389 let mut languages: Vec<Arc<Language>> = self
1390 .syntax_map
1391 .lock()
1392 .layers_for_range(offset..offset, &self.text, false)
1393 .map(|info| info.language.clone())
1394 .collect();
1395
1396 if languages.is_empty() {
1397 if let Some(buffer_language) = self.language() {
1398 languages.push(buffer_language.clone());
1399 }
1400 }
1401
1402 languages
1403 }
1404
1405 /// An integer version number that accounts for all updates besides
1406 /// the buffer's text itself (which is versioned via a version vector).
1407 pub fn non_text_state_update_count(&self) -> usize {
1408 self.non_text_state_update_count
1409 }
1410
1411 /// Whether the buffer is being parsed in the background.
1412 #[cfg(any(test, feature = "test-support"))]
1413 pub fn is_parsing(&self) -> bool {
1414 self.reparse.is_some()
1415 }
1416
1417 /// Indicates whether the buffer contains any regions that may be
1418 /// written in a language that hasn't been loaded yet.
1419 pub fn contains_unknown_injections(&self) -> bool {
1420 self.syntax_map.lock().contains_unknown_injections()
1421 }
1422
1423 #[cfg(test)]
1424 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1425 self.sync_parse_timeout = timeout;
1426 }
1427
1428 /// Called after an edit to synchronize the buffer's main parse tree with
1429 /// the buffer's new underlying state.
1430 ///
1431 /// Locks the syntax map and interpolates the edits since the last reparse
1432 /// into the foreground syntax tree.
1433 ///
1434 /// Then takes a stable snapshot of the syntax map before unlocking it.
1435 /// The snapshot with the interpolated edits is sent to a background thread,
1436 /// where we ask Tree-sitter to perform an incremental parse.
1437 ///
1438 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1439 /// waiting on the parse to complete. As soon as it completes, we proceed
1440 /// synchronously, unless a 1ms timeout elapses.
1441 ///
    /// If we time out waiting on the parse, we spawn a second task that waits
    /// for the parse to complete, and return with the interpolated tree still
    /// in the foreground. When the background parse completes, it calls back
    /// into the main thread and assigns the new parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
1451 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1452 if self.reparse.is_some() {
1453 return;
1454 }
1455 let language = if let Some(language) = self.language.clone() {
1456 language
1457 } else {
1458 return;
1459 };
1460
1461 let text = self.text_snapshot();
1462 let parsed_version = self.version();
1463
1464 let mut syntax_map = self.syntax_map.lock();
1465 syntax_map.interpolate(&text);
1466 let language_registry = syntax_map.language_registry();
1467 let mut syntax_snapshot = syntax_map.snapshot();
1468 drop(syntax_map);
1469
1470 let parse_task = cx.background_spawn({
1471 let language = language.clone();
1472 let language_registry = language_registry.clone();
1473 async move {
1474 syntax_snapshot.reparse(&text, language_registry, language);
1475 syntax_snapshot
1476 }
1477 });
1478
1479 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1480 match cx
1481 .background_executor()
1482 .block_with_timeout(self.sync_parse_timeout, parse_task)
1483 {
1484 Ok(new_syntax_snapshot) => {
1485 self.did_finish_parsing(new_syntax_snapshot, cx);
1486 self.reparse = None;
1487 }
1488 Err(parse_task) => {
1489 self.reparse = Some(cx.spawn(async move |this, cx| {
1490 let new_syntax_map = parse_task.await;
1491 this.update(cx, move |this, cx| {
1492 let grammar_changed =
1493 this.language.as_ref().map_or(true, |current_language| {
1494 !Arc::ptr_eq(&language, current_language)
1495 });
1496 let language_registry_changed = new_syntax_map
1497 .contains_unknown_injections()
1498 && language_registry.map_or(false, |registry| {
1499 registry.version() != new_syntax_map.language_registry_version()
1500 });
1501 let parse_again = language_registry_changed
1502 || grammar_changed
1503 || this.version.changed_since(&parsed_version);
1504 this.did_finish_parsing(new_syntax_map, cx);
1505 this.reparse = None;
1506 if parse_again {
1507 this.reparse(cx);
1508 }
1509 })
1510 .ok();
1511 }));
1512 }
1513 }
1514 }
1515
1516 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1517 self.was_changed();
1518 self.non_text_state_update_count += 1;
1519 self.syntax_map.lock().did_parse(syntax_snapshot);
1520 self.request_autoindent(cx);
1521 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1522 cx.emit(BufferEvent::Reparsed);
1523 cx.notify();
1524 }
1525
1526 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1527 self.parse_status.1.clone()
1528 }
1529
1530 /// Assign to the buffer a set of diagnostics created by a given language server.
1531 pub fn update_diagnostics(
1532 &mut self,
1533 server_id: LanguageServerId,
1534 diagnostics: DiagnosticSet,
1535 cx: &mut Context<Self>,
1536 ) {
1537 let lamport_timestamp = self.text.lamport_clock.tick();
1538 let op = Operation::UpdateDiagnostics {
1539 server_id,
1540 diagnostics: diagnostics.iter().cloned().collect(),
1541 lamport_timestamp,
1542 };
1543 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1544 self.send_operation(op, true, cx);
1545 }
1546
1547 pub fn get_diagnostics(&self, server_id: LanguageServerId) -> Option<&DiagnosticSet> {
1548 let Ok(idx) = self.diagnostics.binary_search_by_key(&server_id, |v| v.0) else {
1549 return None;
1550 };
1551 Some(&self.diagnostics[idx].1)
1552 }
1553
1554 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1555 if let Some(indent_sizes) = self.compute_autoindents() {
1556 let indent_sizes = cx.background_spawn(indent_sizes);
1557 match cx
1558 .background_executor()
1559 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1560 {
1561 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1562 Err(indent_sizes) => {
1563 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1564 let indent_sizes = indent_sizes.await;
1565 this.update(cx, |this, cx| {
1566 this.apply_autoindents(indent_sizes, cx);
1567 })
1568 .ok();
1569 }));
1570 }
1571 }
1572 } else {
1573 self.autoindent_requests.clear();
1574 }
1575 }
1576
1577 fn compute_autoindents(
1578 &self,
1579 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1580 let max_rows_between_yields = 100;
1581 let snapshot = self.snapshot();
1582 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1583 return None;
1584 }
1585
1586 let autoindent_requests = self.autoindent_requests.clone();
1587 Some(async move {
1588 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1589 for request in autoindent_requests {
1590 // Resolve each edited range to its row in the current buffer and in the
1591 // buffer before this batch of edits.
1592 let mut row_ranges = Vec::new();
1593 let mut old_to_new_rows = BTreeMap::new();
1594 let mut language_indent_sizes_by_new_row = Vec::new();
1595 for entry in &request.entries {
1596 let position = entry.range.start;
1597 let new_row = position.to_point(&snapshot).row;
1598 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1599 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1600
1601 if !entry.first_line_is_new {
1602 let old_row = position.to_point(&request.before_edit).row;
1603 old_to_new_rows.insert(old_row, new_row);
1604 }
1605 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1606 }
1607
1608 // Build a map containing the suggested indentation for each of the edited lines
1609 // with respect to the state of the buffer before these edits. This map is keyed
1610 // by the rows for these lines in the current state of the buffer.
1611 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1612 let old_edited_ranges =
1613 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1614 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1615 let mut language_indent_size = IndentSize::default();
1616 for old_edited_range in old_edited_ranges {
1617 let suggestions = request
1618 .before_edit
1619 .suggest_autoindents(old_edited_range.clone())
1620 .into_iter()
1621 .flatten();
1622 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1623 if let Some(suggestion) = suggestion {
1624 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1625
1626 // Find the indent size based on the language for this row.
1627 while let Some((row, size)) = language_indent_sizes.peek() {
1628 if *row > new_row {
1629 break;
1630 }
1631 language_indent_size = *size;
1632 language_indent_sizes.next();
1633 }
1634
1635 let suggested_indent = old_to_new_rows
1636 .get(&suggestion.basis_row)
1637 .and_then(|from_row| {
1638 Some(old_suggestions.get(from_row).copied()?.0)
1639 })
1640 .unwrap_or_else(|| {
1641 request
1642 .before_edit
1643 .indent_size_for_line(suggestion.basis_row)
1644 })
1645 .with_delta(suggestion.delta, language_indent_size);
1646 old_suggestions
1647 .insert(new_row, (suggested_indent, suggestion.within_error));
1648 }
1649 }
1650 yield_now().await;
1651 }
1652
1653 // Compute new suggestions for each line, but only include them in the result
1654 // if they differ from the old suggestion for that line.
1655 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1656 let mut language_indent_size = IndentSize::default();
1657 for (row_range, original_indent_column) in row_ranges {
1658 let new_edited_row_range = if request.is_block_mode {
1659 row_range.start..row_range.start + 1
1660 } else {
1661 row_range.clone()
1662 };
1663
1664 let suggestions = snapshot
1665 .suggest_autoindents(new_edited_row_range.clone())
1666 .into_iter()
1667 .flatten();
1668 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1669 if let Some(suggestion) = suggestion {
1670 // Find the indent size based on the language for this row.
1671 while let Some((row, size)) = language_indent_sizes.peek() {
1672 if *row > new_row {
1673 break;
1674 }
1675 language_indent_size = *size;
1676 language_indent_sizes.next();
1677 }
1678
1679 let suggested_indent = indent_sizes
1680 .get(&suggestion.basis_row)
1681 .copied()
1682 .map(|e| e.0)
1683 .unwrap_or_else(|| {
1684 snapshot.indent_size_for_line(suggestion.basis_row)
1685 })
1686 .with_delta(suggestion.delta, language_indent_size);
1687
1688 if old_suggestions.get(&new_row).map_or(
1689 true,
1690 |(old_indentation, was_within_error)| {
1691 suggested_indent != *old_indentation
1692 && (!suggestion.within_error || *was_within_error)
1693 },
1694 ) {
1695 indent_sizes.insert(
1696 new_row,
1697 (suggested_indent, request.ignore_empty_lines),
1698 );
1699 }
1700 }
1701 }
1702
1703 if let (true, Some(original_indent_column)) =
1704 (request.is_block_mode, original_indent_column)
1705 {
1706 let new_indent =
1707 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1708 *indent
1709 } else {
1710 snapshot.indent_size_for_line(row_range.start)
1711 };
1712 let delta = new_indent.len as i64 - original_indent_column as i64;
1713 if delta != 0 {
1714 for row in row_range.skip(1) {
1715 indent_sizes.entry(row).or_insert_with(|| {
1716 let mut size = snapshot.indent_size_for_line(row);
1717 if size.kind == new_indent.kind {
1718 match delta.cmp(&0) {
1719 Ordering::Greater => size.len += delta as u32,
1720 Ordering::Less => {
1721 size.len = size.len.saturating_sub(-delta as u32)
1722 }
1723 Ordering::Equal => {}
1724 }
1725 }
1726 (size, request.ignore_empty_lines)
1727 });
1728 }
1729 }
1730 }
1731
1732 yield_now().await;
1733 }
1734 }
1735
1736 indent_sizes
1737 .into_iter()
1738 .filter_map(|(row, (indent, ignore_empty_lines))| {
1739 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1740 None
1741 } else {
1742 Some((row, indent))
1743 }
1744 })
1745 .collect()
1746 })
1747 }
1748
1749 fn apply_autoindents(
1750 &mut self,
1751 indent_sizes: BTreeMap<u32, IndentSize>,
1752 cx: &mut Context<Self>,
1753 ) {
1754 self.autoindent_requests.clear();
1755
1756 let edits: Vec<_> = indent_sizes
1757 .into_iter()
1758 .filter_map(|(row, indent_size)| {
1759 let current_size = indent_size_for_line(self, row);
1760 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1761 })
1762 .collect();
1763
1764 let preserve_preview = self.preserve_preview();
1765 self.edit(edits, None, cx);
1766 if preserve_preview {
1767 self.refresh_preview();
1768 }
1769 }
1770
1771 /// Create a minimal edit that will cause the given row to be indented
1772 /// with the given size. After applying this edit, the length of the line
1773 /// will always be at least `new_size.len`.
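    ///
    /// A minimal usage sketch (illustrative only; assumes `IndentSize` and `Point`
    /// are in scope as re-exported by this crate):
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 4 spaces inserts two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(edit, Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string())));
    /// ```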
1774 pub fn edit_for_indent_size_adjustment(
1775 row: u32,
1776 current_size: IndentSize,
1777 new_size: IndentSize,
1778 ) -> Option<(Range<Point>, String)> {
1779 if new_size.kind == current_size.kind {
1780            match new_size.len.cmp(&current_size.len) {
1781 Ordering::Greater => {
1782 let point = Point::new(row, 0);
1783 Some((
1784 point..point,
1785 iter::repeat(new_size.char())
1786 .take((new_size.len - current_size.len) as usize)
1787 .collect::<String>(),
1788 ))
1789 }
1790
1791 Ordering::Less => Some((
1792 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1793 String::new(),
1794 )),
1795
1796 Ordering::Equal => None,
1797 }
1798 } else {
1799 Some((
1800 Point::new(row, 0)..Point::new(row, current_size.len),
1801 iter::repeat(new_size.char())
1802 .take(new_size.len as usize)
1803 .collect::<String>(),
1804 ))
1805 }
1806 }
1807
1808 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1809 /// and the given new text.
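    ///
    /// A sketch of the intended flow (assumes a gpui test context that owns an
    /// `Entity<Buffer>` and can await tasks; not a verbatim API transcript):
    ///
    /// ```ignore
    /// // Compute the diff on a background task, then apply it to the buffer.
    /// let task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// let diff = task.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```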
1810 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1811 let old_text = self.as_rope().clone();
1812 let base_version = self.version();
1813 cx.background_executor()
1814 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1815 let old_text = old_text.to_string();
1816 let line_ending = LineEnding::detect(&new_text);
1817 LineEnding::normalize(&mut new_text);
1818 let edits = text_diff(&old_text, &new_text);
1819 Diff {
1820 base_version,
1821 line_ending,
1822 edits,
1823 }
1824 })
1825 }
1826
1827 /// Spawns a background task that searches the buffer for any whitespace
1828    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
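    ///
    /// Usage mirrors [`Buffer::diff`]: await the task, then pass the result to
    /// [`Buffer::apply_diff`] (sketch, assuming a gpui test context):
    ///
    /// ```ignore
    /// let task = buffer.update(cx, |buffer, cx| buffer.remove_trailing_whitespace(cx));
    /// let diff = task.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```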
1829 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1830 let old_text = self.as_rope().clone();
1831 let line_ending = self.line_ending();
1832 let base_version = self.version();
1833 cx.background_spawn(async move {
1834 let ranges = trailing_whitespace_ranges(&old_text);
1835 let empty = Arc::<str>::from("");
1836 Diff {
1837 base_version,
1838 line_ending,
1839 edits: ranges
1840 .into_iter()
1841 .map(|range| (range, empty.clone()))
1842 .collect(),
1843 }
1844 })
1845 }
1846
1847 /// Ensures that the buffer ends with a single newline character, and
1848 /// no other whitespace.
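    ///
    /// Sketch of the effect (assumes a `cx: &mut Context<Buffer>` and that
    /// `buffer.text()` returns the buffer's contents as a `String`):
    ///
    /// ```ignore
    /// buffer.set_text("fn main() {}\n\n  ", cx);
    /// buffer.ensure_final_newline(cx);
    /// assert_eq!(buffer.text(), "fn main() {}\n");
    /// ```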
1849 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
1850 let len = self.len();
1851 let mut offset = len;
1852 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
1853 let non_whitespace_len = chunk
1854 .trim_end_matches(|c: char| c.is_ascii_whitespace())
1855 .len();
1856 offset -= chunk.len();
1857 offset += non_whitespace_len;
1858 if non_whitespace_len != 0 {
1859 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
1860 return;
1861 }
1862 break;
1863 }
1864 }
1865 self.edit([(offset..len, "\n")], None, cx);
1866 }
1867
1868 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
1869 /// calculated, then adjust the diff to account for those changes, and discard any
1870 /// parts of the diff that conflict with those changes.
1871 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
1872 let snapshot = self.snapshot();
1873 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
1874 let mut delta = 0;
1875 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
1876 while let Some(edit_since) = edits_since.peek() {
1877 // If the edit occurs after a diff hunk, then it does not
1878 // affect that hunk.
1879 if edit_since.old.start > range.end {
1880 break;
1881 }
1882 // If the edit precedes the diff hunk, then adjust the hunk
1883 // to reflect the edit.
1884 else if edit_since.old.end < range.start {
1885 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
1886 edits_since.next();
1887 }
1888 // If the edit intersects a diff hunk, then discard that hunk.
1889 else {
1890 return None;
1891 }
1892 }
1893
1894 let start = (range.start as i64 + delta) as usize;
1895 let end = (range.end as i64 + delta) as usize;
1896 Some((start..end, new_text))
1897 });
1898
1899 self.start_transaction();
1900 self.text.set_line_ending(diff.line_ending);
1901 self.edit(adjusted_edits, None, cx);
1902 self.end_transaction(cx)
1903 }
1904
1905 fn has_unsaved_edits(&self) -> bool {
1906 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
1907
1908 if last_version == self.version {
1909 self.has_unsaved_edits
1910 .set((last_version, has_unsaved_edits));
1911 return has_unsaved_edits;
1912 }
1913
1914 let has_edits = self.has_edits_since(&self.saved_version);
1915 self.has_unsaved_edits
1916 .set((self.version.clone(), has_edits));
1917 has_edits
1918 }
1919
1920 /// Checks if the buffer has unsaved changes.
1921 pub fn is_dirty(&self) -> bool {
1922 if self.capability == Capability::ReadOnly {
1923 return false;
1924 }
1925 if self.has_conflict {
1926 return true;
1927 }
1928 match self.file.as_ref().map(|f| f.disk_state()) {
1929 Some(DiskState::New) | Some(DiskState::Deleted) => {
1930 !self.is_empty() && self.has_unsaved_edits()
1931 }
1932 _ => self.has_unsaved_edits(),
1933 }
1934 }
1935
1936 /// Checks if the buffer and its file have both changed since the buffer
1937 /// was last saved or reloaded.
1938 pub fn has_conflict(&self) -> bool {
1939 if self.has_conflict {
1940 return true;
1941 }
1942 let Some(file) = self.file.as_ref() else {
1943 return false;
1944 };
1945 match file.disk_state() {
1946 DiskState::New => false,
1947 DiskState::Present { mtime } => match self.saved_mtime {
1948 Some(saved_mtime) => {
1949 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
1950 }
1951 None => true,
1952 },
1953 DiskState::Deleted => false,
1954 }
1955 }
1956
1957 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
1958 pub fn subscribe(&mut self) -> Subscription {
1959 self.text.subscribe()
1960 }
1961
1962 /// Adds a bit to the list of bits that are set when the buffer's text changes.
1963 ///
1964 /// This allows downstream code to check if the buffer's text has changed without
1965    /// waiting for an effect cycle, which would be required if using events.
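    ///
    /// Sketch (assumes `std::rc::Rc` and `std::cell::Cell` are in scope, and a
    /// `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let bit = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&bit));
    /// buffer.edit([(0..0, "x")], None, cx);
    /// assert!(bit.get()); // the edit flipped the bit
    /// ```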
1966 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
1967 if let Err(ix) = self
1968 .change_bits
1969 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
1970 {
1971 self.change_bits.insert(ix, bit);
1972 }
1973 }
1974
1975 fn was_changed(&mut self) {
1976 self.change_bits.retain(|change_bit| {
1977 change_bit.upgrade().map_or(false, |bit| {
1978 bit.replace(true);
1979 true
1980 })
1981 });
1982 }
1983
1984 /// Starts a transaction, if one is not already in-progress. When undoing or
1985 /// redoing edits, all of the edits performed within a transaction are undone
1986 /// or redone together.
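    ///
    /// Sketch of grouping two edits so that a single undo reverts both
    /// (assumes a `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx); // reverts both edits at once
    /// ```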
1987 pub fn start_transaction(&mut self) -> Option<TransactionId> {
1988 self.start_transaction_at(Instant::now())
1989 }
1990
1991 /// Starts a transaction, providing the current time. Subsequent transactions
1992 /// that occur within a short period of time will be grouped together. This
1993 /// is controlled by the buffer's undo grouping duration.
1994 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
1995 self.transaction_depth += 1;
1996 if self.was_dirty_before_starting_transaction.is_none() {
1997 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
1998 }
1999 self.text.start_transaction_at(now)
2000 }
2001
2002 /// Terminates the current transaction, if this is the outermost transaction.
2003 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2004 self.end_transaction_at(Instant::now(), cx)
2005 }
2006
2007 /// Terminates the current transaction, providing the current time. Subsequent transactions
2008 /// that occur within a short period of time will be grouped together. This
2009 /// is controlled by the buffer's undo grouping duration.
2010 pub fn end_transaction_at(
2011 &mut self,
2012 now: Instant,
2013 cx: &mut Context<Self>,
2014 ) -> Option<TransactionId> {
2015 assert!(self.transaction_depth > 0);
2016 self.transaction_depth -= 1;
2017 let was_dirty = if self.transaction_depth == 0 {
2018 self.was_dirty_before_starting_transaction.take().unwrap()
2019 } else {
2020 false
2021 };
2022 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2023 self.did_edit(&start_version, was_dirty, cx);
2024 Some(transaction_id)
2025 } else {
2026 None
2027 }
2028 }
2029
2030 /// Manually add a transaction to the buffer's undo history.
2031 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2032 self.text.push_transaction(transaction, now);
2033 }
2034
2035 /// Prevent the last transaction from being grouped with any subsequent transactions,
2036    /// even if they occur within the buffer's undo grouping duration.
2037 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2038 self.text.finalize_last_transaction()
2039 }
2040
2041 /// Manually group all changes since a given transaction.
2042 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2043 self.text.group_until_transaction(transaction_id);
2044 }
2045
2046    /// Manually remove a transaction from the buffer's undo history.
2047 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2048 self.text.forget_transaction(transaction_id)
2049 }
2050
2051    /// Retrieve a transaction from the buffer's undo history.
2052 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2053 self.text.get_transaction(transaction_id)
2054 }
2055
2056 /// Manually merge two transactions in the buffer's undo history.
2057 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2058 self.text.merge_transactions(transaction, destination);
2059 }
2060
2061 /// Waits for the buffer to receive operations with the given timestamps.
2062 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2063 &mut self,
2064 edit_ids: It,
2065 ) -> impl Future<Output = Result<()>> + use<It> {
2066 self.text.wait_for_edits(edit_ids)
2067 }
2068
2069 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2070 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2071 &mut self,
2072 anchors: It,
2073 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2074 self.text.wait_for_anchors(anchors)
2075 }
2076
2077 /// Waits for the buffer to receive operations up to the given version.
2078 pub fn wait_for_version(
2079 &mut self,
2080 version: clock::Global,
2081 ) -> impl Future<Output = Result<()>> + use<> {
2082 self.text.wait_for_version(version)
2083 }
2084
2085    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`], or
2086    /// [`Buffer::wait_for_version`] to resolve with an error.
2087 pub fn give_up_waiting(&mut self) {
2088 self.text.give_up_waiting();
2089 }
2090
2091 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2092 pub fn set_active_selections(
2093 &mut self,
2094 selections: Arc<[Selection<Anchor>]>,
2095 line_mode: bool,
2096 cursor_shape: CursorShape,
2097 cx: &mut Context<Self>,
2098 ) {
2099 let lamport_timestamp = self.text.lamport_clock.tick();
2100 self.remote_selections.insert(
2101 self.text.replica_id(),
2102 SelectionSet {
2103 selections: selections.clone(),
2104 lamport_timestamp,
2105 line_mode,
2106 cursor_shape,
2107 },
2108 );
2109 self.send_operation(
2110 Operation::UpdateSelections {
2111 selections,
2112 line_mode,
2113 lamport_timestamp,
2114 cursor_shape,
2115 },
2116 true,
2117 cx,
2118 );
2119 self.non_text_state_update_count += 1;
2120 cx.notify();
2121 }
2122
2123 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2124 /// this replica.
2125 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2126 if self
2127 .remote_selections
2128 .get(&self.text.replica_id())
2129 .map_or(true, |set| !set.selections.is_empty())
2130 {
2131 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2132 }
2133 }
2134
2135 /// Replaces the buffer's entire text.
2136 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2137 where
2138 T: Into<Arc<str>>,
2139 {
2140 self.autoindent_requests.clear();
2141 self.edit([(0..self.len(), text)], None, cx)
2142 }
2143
2144 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2145 /// delete, and a string of text to insert at that location.
2146 ///
2147 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2148 /// request for the edited ranges, which will be processed when the buffer finishes
2149 /// parsing.
2150 ///
2151    /// Parsing takes place at the end of a transaction, and may be performed synchronously
2152    /// or asynchronously, depending on the changes.
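    ///
    /// A sketch of a multi-range edit with auto-indentation (assumes a
    /// `cx: &mut Context<Buffer>`; the offsets are illustrative):
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [
    ///         // Replace the first three bytes.
    ///         (0..3, "let"),
    ///         // Append a new line at the end of the buffer.
    ///         (buffer.len()..buffer.len(), "\nreturn x;"),
    ///     ],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```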
2153 pub fn edit<I, S, T>(
2154 &mut self,
2155 edits_iter: I,
2156 autoindent_mode: Option<AutoindentMode>,
2157 cx: &mut Context<Self>,
2158 ) -> Option<clock::Lamport>
2159 where
2160 I: IntoIterator<Item = (Range<S>, T)>,
2161 S: ToOffset,
2162 T: Into<Arc<str>>,
2163 {
2164 // Skip invalid edits and coalesce contiguous ones.
2165 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2166
2167 for (range, new_text) in edits_iter {
2168 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2169
2170 if range.start > range.end {
2171 mem::swap(&mut range.start, &mut range.end);
2172 }
2173 let new_text = new_text.into();
2174 if !new_text.is_empty() || !range.is_empty() {
2175 if let Some((prev_range, prev_text)) = edits.last_mut() {
2176 if prev_range.end >= range.start {
2177 prev_range.end = cmp::max(prev_range.end, range.end);
2178 *prev_text = format!("{prev_text}{new_text}").into();
2179 } else {
2180 edits.push((range, new_text));
2181 }
2182 } else {
2183 edits.push((range, new_text));
2184 }
2185 }
2186 }
2187 if edits.is_empty() {
2188 return None;
2189 }
2190
2191 self.start_transaction();
2192 self.pending_autoindent.take();
2193 let autoindent_request = autoindent_mode
2194 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2195
2196 let edit_operation = self.text.edit(edits.iter().cloned());
2197 let edit_id = edit_operation.timestamp();
2198
2199 if let Some((before_edit, mode)) = autoindent_request {
2200 let mut delta = 0isize;
2201 let entries = edits
2202 .into_iter()
2203 .enumerate()
2204 .zip(&edit_operation.as_edit().unwrap().new_text)
2205 .map(|((ix, (range, _)), new_text)| {
2206 let new_text_length = new_text.len();
2207 let old_start = range.start.to_point(&before_edit);
2208 let new_start = (delta + range.start as isize) as usize;
2209 let range_len = range.end - range.start;
2210 delta += new_text_length as isize - range_len as isize;
2211
2212 // Decide what range of the insertion to auto-indent, and whether
2213 // the first line of the insertion should be considered a newly-inserted line
2214 // or an edit to an existing line.
2215 let mut range_of_insertion_to_indent = 0..new_text_length;
2216 let mut first_line_is_new = true;
2217
2218 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2219 let old_line_end = before_edit.line_len(old_start.row);
2220
2221 if old_start.column > old_line_start {
2222 first_line_is_new = false;
2223 }
2224
2225 if !new_text.contains('\n')
2226 && (old_start.column + (range_len as u32) < old_line_end
2227 || old_line_end == old_line_start)
2228 {
2229 first_line_is_new = false;
2230 }
2231
2232 // When inserting text starting with a newline, avoid auto-indenting the
2233 // previous line.
2234 if new_text.starts_with('\n') {
2235 range_of_insertion_to_indent.start += 1;
2236 first_line_is_new = true;
2237 }
2238
2239 let mut original_indent_column = None;
2240 if let AutoindentMode::Block {
2241 original_indent_columns,
2242 } = &mode
2243 {
2244 original_indent_column = Some(if new_text.starts_with('\n') {
2245 indent_size_for_text(
2246 new_text[range_of_insertion_to_indent.clone()].chars(),
2247 )
2248 .len
2249 } else {
2250 original_indent_columns
2251 .get(ix)
2252 .copied()
2253 .flatten()
2254 .unwrap_or_else(|| {
2255 indent_size_for_text(
2256 new_text[range_of_insertion_to_indent.clone()].chars(),
2257 )
2258 .len
2259 })
2260 });
2261
2262 // Avoid auto-indenting the line after the edit.
2263 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2264 range_of_insertion_to_indent.end -= 1;
2265 }
2266 }
2267
2268 AutoindentRequestEntry {
2269 first_line_is_new,
2270 original_indent_column,
2271 indent_size: before_edit.language_indent_size_at(range.start, cx),
2272 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2273 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2274 }
2275 })
2276 .collect();
2277
2278 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2279 before_edit,
2280 entries,
2281 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2282 ignore_empty_lines: false,
2283 }));
2284 }
2285
2286 self.end_transaction(cx);
2287 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2288 Some(edit_id)
2289 }
2290
2291 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2292 self.was_changed();
2293
2294 if self.edits_since::<usize>(old_version).next().is_none() {
2295 return;
2296 }
2297
2298 self.reparse(cx);
2299 cx.emit(BufferEvent::Edited);
2300 if was_dirty != self.is_dirty() {
2301 cx.emit(BufferEvent::DirtyChanged);
2302 }
2303 cx.notify();
2304 }
2305
2306 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2307 where
2308 I: IntoIterator<Item = Range<T>>,
2309 T: ToOffset + Copy,
2310 {
2311 let before_edit = self.snapshot();
2312 let entries = ranges
2313 .into_iter()
2314 .map(|range| AutoindentRequestEntry {
2315 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2316 first_line_is_new: true,
2317 indent_size: before_edit.language_indent_size_at(range.start, cx),
2318 original_indent_column: None,
2319 })
2320 .collect();
2321 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2322 before_edit,
2323 entries,
2324 is_block_mode: false,
2325 ignore_empty_lines: true,
2326 }));
2327 self.request_autoindent(cx);
2328 }
2329
2330 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2331 // You can also request the insertion of empty lines above and below the line starting at the returned point.
2332 pub fn insert_empty_line(
2333 &mut self,
2334 position: impl ToPoint,
2335 space_above: bool,
2336 space_below: bool,
2337 cx: &mut Context<Self>,
2338 ) -> Point {
2339 let mut position = position.to_point(self);
2340
2341 self.start_transaction();
2342
2343 self.edit(
2344 [(position..position, "\n")],
2345 Some(AutoindentMode::EachLine),
2346 cx,
2347 );
2348
2349 if position.column > 0 {
2350 position += Point::new(1, 0);
2351 }
2352
2353 if !self.is_line_blank(position.row) {
2354 self.edit(
2355 [(position..position, "\n")],
2356 Some(AutoindentMode::EachLine),
2357 cx,
2358 );
2359 }
2360
2361 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2362 self.edit(
2363 [(position..position, "\n")],
2364 Some(AutoindentMode::EachLine),
2365 cx,
2366 );
2367 position.row += 1;
2368 }
2369
2370 if space_below
2371 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2372 {
2373 self.edit(
2374 [(position..position, "\n")],
2375 Some(AutoindentMode::EachLine),
2376 cx,
2377 );
2378 }
2379
2380 self.end_transaction(cx);
2381
2382 position
2383 }
2384
2385 /// Applies the given remote operations to the buffer.
2386 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2387 self.pending_autoindent.take();
2388 let was_dirty = self.is_dirty();
2389 let old_version = self.version.clone();
2390 let mut deferred_ops = Vec::new();
2391 let buffer_ops = ops
2392 .into_iter()
2393 .filter_map(|op| match op {
2394 Operation::Buffer(op) => Some(op),
2395 _ => {
2396 if self.can_apply_op(&op) {
2397 self.apply_op(op, cx);
2398 } else {
2399 deferred_ops.push(op);
2400 }
2401 None
2402 }
2403 })
2404 .collect::<Vec<_>>();
2405 for operation in buffer_ops.iter() {
2406 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2407 }
2408 self.text.apply_ops(buffer_ops);
2409 self.deferred_ops.insert(deferred_ops);
2410 self.flush_deferred_ops(cx);
2411 self.did_edit(&old_version, was_dirty, cx);
2412 // Notify independently of whether the buffer was edited as the operations could include a
2413 // selection update.
2414 cx.notify();
2415 }
2416
2417 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2418 let mut deferred_ops = Vec::new();
2419 for op in self.deferred_ops.drain().iter().cloned() {
2420 if self.can_apply_op(&op) {
2421 self.apply_op(op, cx);
2422 } else {
2423 deferred_ops.push(op);
2424 }
2425 }
2426 self.deferred_ops.insert(deferred_ops);
2427 }
2428
2429 pub fn has_deferred_ops(&self) -> bool {
2430 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2431 }
2432
2433 fn can_apply_op(&self, operation: &Operation) -> bool {
2434 match operation {
2435 Operation::Buffer(_) => {
2436 unreachable!("buffer operations should never be applied at this layer")
2437 }
2438 Operation::UpdateDiagnostics {
2439 diagnostics: diagnostic_set,
2440 ..
2441 } => diagnostic_set.iter().all(|diagnostic| {
2442 self.text.can_resolve(&diagnostic.range.start)
2443 && self.text.can_resolve(&diagnostic.range.end)
2444 }),
2445 Operation::UpdateSelections { selections, .. } => selections
2446 .iter()
2447 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2448 Operation::UpdateCompletionTriggers { .. } => true,
2449 }
2450 }
2451
2452 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2453 match operation {
2454 Operation::Buffer(_) => {
2455 unreachable!("buffer operations should never be applied at this layer")
2456 }
2457 Operation::UpdateDiagnostics {
2458 server_id,
2459 diagnostics: diagnostic_set,
2460 lamport_timestamp,
2461 } => {
2462 let snapshot = self.snapshot();
2463 self.apply_diagnostic_update(
2464 server_id,
2465 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2466 lamport_timestamp,
2467 cx,
2468 );
2469 }
2470 Operation::UpdateSelections {
2471 selections,
2472 lamport_timestamp,
2473 line_mode,
2474 cursor_shape,
2475 } => {
2476 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id) {
2477 if set.lamport_timestamp > lamport_timestamp {
2478 return;
2479 }
2480 }
2481
2482 self.remote_selections.insert(
2483 lamport_timestamp.replica_id,
2484 SelectionSet {
2485 selections,
2486 lamport_timestamp,
2487 line_mode,
2488 cursor_shape,
2489 },
2490 );
2491 self.text.lamport_clock.observe(lamport_timestamp);
2492 self.non_text_state_update_count += 1;
2493 }
2494 Operation::UpdateCompletionTriggers {
2495 triggers,
2496 lamport_timestamp,
2497 server_id,
2498 } => {
2499 if triggers.is_empty() {
2500 self.completion_triggers_per_language_server
2501 .remove(&server_id);
2502 self.completion_triggers = self
2503 .completion_triggers_per_language_server
2504 .values()
2505 .flat_map(|triggers| triggers.into_iter().cloned())
2506 .collect();
2507 } else {
2508 self.completion_triggers_per_language_server
2509 .insert(server_id, triggers.iter().cloned().collect());
2510 self.completion_triggers.extend(triggers);
2511 }
2512 self.text.lamport_clock.observe(lamport_timestamp);
2513 }
2514 }
2515 }
2516
2517 fn apply_diagnostic_update(
2518 &mut self,
2519 server_id: LanguageServerId,
2520 diagnostics: DiagnosticSet,
2521 lamport_timestamp: clock::Lamport,
2522 cx: &mut Context<Self>,
2523 ) {
2524 if lamport_timestamp > self.diagnostics_timestamp {
2525 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2526 if diagnostics.is_empty() {
2527 if let Ok(ix) = ix {
2528 self.diagnostics.remove(ix);
2529 }
2530 } else {
2531 match ix {
2532 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2533 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2534 };
2535 }
2536 self.diagnostics_timestamp = lamport_timestamp;
2537 self.non_text_state_update_count += 1;
2538 self.text.lamport_clock.observe(lamport_timestamp);
2539 cx.notify();
2540 cx.emit(BufferEvent::DiagnosticsUpdated);
2541 }
2542 }
2543
2544 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2545 self.was_changed();
2546 cx.emit(BufferEvent::Operation {
2547 operation,
2548 is_local,
2549 });
2550 }
2551
2552 /// Removes the selections for a given peer.
2553 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2554 self.remote_selections.remove(&replica_id);
2555 cx.notify();
2556 }
2557
2558 /// Undoes the most recent transaction.
2559 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2560 let was_dirty = self.is_dirty();
2561 let old_version = self.version.clone();
2562
2563 if let Some((transaction_id, operation)) = self.text.undo() {
2564 self.send_operation(Operation::Buffer(operation), true, cx);
2565 self.did_edit(&old_version, was_dirty, cx);
2566 Some(transaction_id)
2567 } else {
2568 None
2569 }
2570 }
2571
2572 /// Manually undoes a specific transaction in the buffer's undo history.
2573 pub fn undo_transaction(
2574 &mut self,
2575 transaction_id: TransactionId,
2576 cx: &mut Context<Self>,
2577 ) -> bool {
2578 let was_dirty = self.is_dirty();
2579 let old_version = self.version.clone();
2580 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2581 self.send_operation(Operation::Buffer(operation), true, cx);
2582 self.did_edit(&old_version, was_dirty, cx);
2583 true
2584 } else {
2585 false
2586 }
2587 }
2588
2589 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2590 pub fn undo_to_transaction(
2591 &mut self,
2592 transaction_id: TransactionId,
2593 cx: &mut Context<Self>,
2594 ) -> bool {
2595 let was_dirty = self.is_dirty();
2596 let old_version = self.version.clone();
2597
2598 let operations = self.text.undo_to_transaction(transaction_id);
2599 let undone = !operations.is_empty();
2600 for operation in operations {
2601 self.send_operation(Operation::Buffer(operation), true, cx);
2602 }
2603 if undone {
2604 self.did_edit(&old_version, was_dirty, cx)
2605 }
2606 undone
2607 }
2608
2609 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2610 let was_dirty = self.is_dirty();
2611 let operation = self.text.undo_operations(counts);
2612 let old_version = self.version.clone();
2613 self.send_operation(Operation::Buffer(operation), true, cx);
2614 self.did_edit(&old_version, was_dirty, cx);
2615 }
2616
2617    /// Redoes the most recently undone transaction.
2618 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2619 let was_dirty = self.is_dirty();
2620 let old_version = self.version.clone();
2621
2622 if let Some((transaction_id, operation)) = self.text.redo() {
2623 self.send_operation(Operation::Buffer(operation), true, cx);
2624 self.did_edit(&old_version, was_dirty, cx);
2625 Some(transaction_id)
2626 } else {
2627 None
2628 }
2629 }
2630
2631    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
2632 pub fn redo_to_transaction(
2633 &mut self,
2634 transaction_id: TransactionId,
2635 cx: &mut Context<Self>,
2636 ) -> bool {
2637 let was_dirty = self.is_dirty();
2638 let old_version = self.version.clone();
2639
2640 let operations = self.text.redo_to_transaction(transaction_id);
2641 let redone = !operations.is_empty();
2642 for operation in operations {
2643 self.send_operation(Operation::Buffer(operation), true, cx);
2644 }
2645 if redone {
2646 self.did_edit(&old_version, was_dirty, cx)
2647 }
2648 redone
2649 }
2650
2651 /// Override current completion triggers with the user-provided completion triggers.
2652 pub fn set_completion_triggers(
2653 &mut self,
2654 server_id: LanguageServerId,
2655 triggers: BTreeSet<String>,
2656 cx: &mut Context<Self>,
2657 ) {
2658 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2659 if triggers.is_empty() {
2660 self.completion_triggers_per_language_server
2661 .remove(&server_id);
2662 self.completion_triggers = self
2663 .completion_triggers_per_language_server
2664 .values()
2665 .flat_map(|triggers| triggers.into_iter().cloned())
2666 .collect();
2667 } else {
2668 self.completion_triggers_per_language_server
2669 .insert(server_id, triggers.clone());
2670 self.completion_triggers.extend(triggers.iter().cloned());
2671 }
2672 self.send_operation(
2673 Operation::UpdateCompletionTriggers {
2674 triggers: triggers.iter().cloned().collect(),
2675 lamport_timestamp: self.completion_triggers_timestamp,
2676 server_id,
2677 },
2678 true,
2679 cx,
2680 );
2681 cx.notify();
2682 }
2683
2684 /// Returns a list of strings which trigger a completion menu for this language.
2685    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2686 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2687 &self.completion_triggers
2688 }
2689
2690 /// Call this directly after performing edits to prevent the preview tab
2691 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2692 /// to return false until there are additional edits.
2693 pub fn refresh_preview(&mut self) {
2694 self.preview_version = self.version.clone();
2695 }
2696
2697 /// Whether we should preserve the preview status of a tab containing this buffer.
2698 pub fn preserve_preview(&self) -> bool {
2699 !self.has_edits_since(&self.preview_version)
2700 }
2701}
2702
2703#[doc(hidden)]
2704#[cfg(any(test, feature = "test-support"))]
2705impl Buffer {
2706 pub fn edit_via_marked_text(
2707 &mut self,
2708 marked_string: &str,
2709 autoindent_mode: Option<AutoindentMode>,
2710 cx: &mut Context<Self>,
2711 ) {
2712 let edits = self.edits_for_marked_text(marked_string);
2713 self.edit(edits, autoindent_mode, cx);
2714 }
2715
2716 pub fn set_group_interval(&mut self, group_interval: Duration) {
2717 self.text.set_group_interval(group_interval);
2718 }
2719
2720 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2721 where
2722 T: rand::Rng,
2723 {
2724 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2725 let mut last_end = None;
2726 for _ in 0..old_range_count {
2727 if last_end.map_or(false, |last_end| last_end >= self.len()) {
2728 break;
2729 }
2730
2731 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2732 let mut range = self.random_byte_range(new_start, rng);
2733 if rng.gen_bool(0.2) {
2734 mem::swap(&mut range.start, &mut range.end);
2735 }
2736 last_end = Some(range.end);
2737
2738 let new_text_len = rng.gen_range(0..10);
2739 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2740 new_text = new_text.to_uppercase();
2741
2742 edits.push((range, new_text));
2743 }
2744 log::info!("mutating buffer {} with {:?}", self.replica_id(), edits);
2745 self.edit(edits, None, cx);
2746 }
2747
2748 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
2749 let was_dirty = self.is_dirty();
2750 let old_version = self.version.clone();
2751
2752 let ops = self.text.randomly_undo_redo(rng);
2753 if !ops.is_empty() {
2754 for op in ops {
2755 self.send_operation(Operation::Buffer(op), true, cx);
2756 self.did_edit(&old_version, was_dirty, cx);
2757 }
2758 }
2759 }
2760}
2761
2762impl EventEmitter<BufferEvent> for Buffer {}
2763
2764impl Deref for Buffer {
2765 type Target = TextBuffer;
2766
2767 fn deref(&self) -> &Self::Target {
2768 &self.text
2769 }
2770}
2771
2772impl BufferSnapshot {
2773 /// Returns [`IndentSize`] for a given line that respects user settings and
2774 /// language preferences.
2775 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
2776 indent_size_for_line(self, row)
2777 }
2778
2779 /// Returns [`IndentSize`] for a given position that respects user settings
2780 /// and language preferences.
2781 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
2782 let settings = language_settings(
2783 self.language_at(position).map(|l| l.name()),
2784 self.file(),
2785 cx,
2786 );
2787 if settings.hard_tabs {
2788 IndentSize::tab()
2789 } else {
2790 IndentSize::spaces(settings.tab_size.get())
2791 }
2792 }
2793
2794 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
2795 /// is passed in as `single_indent_size`.
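    ///
    /// Sketch (assumes a `BufferSnapshot` named `snapshot`):
    ///
    /// ```ignore
    /// // Suggest indents for rows 0..3, using 4 spaces per indentation level.
    /// let suggestions = snapshot.suggested_indents(0..3, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: indent by {} columns", indent.len);
    /// }
    /// ```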
2796 pub fn suggested_indents(
2797 &self,
2798 rows: impl Iterator<Item = u32>,
2799 single_indent_size: IndentSize,
2800 ) -> BTreeMap<u32, IndentSize> {
2801 let mut result = BTreeMap::new();
2802
2803 for row_range in contiguous_ranges(rows, 10) {
2804 let suggestions = match self.suggest_autoindents(row_range.clone()) {
2805 Some(suggestions) => suggestions,
2806 _ => break,
2807 };
2808
2809 for (row, suggestion) in row_range.zip(suggestions) {
2810 let indent_size = if let Some(suggestion) = suggestion {
2811 result
2812 .get(&suggestion.basis_row)
2813 .copied()
2814 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
2815 .with_delta(suggestion.delta, single_indent_size)
2816 } else {
2817 self.indent_size_for_line(row)
2818 };
2819
2820 result.insert(row, indent_size);
2821 }
2822 }
2823
2824 result
2825 }
2826
2827 fn suggest_autoindents(
2828 &self,
2829 row_range: Range<u32>,
2830 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
2831 let config = &self.language.as_ref()?.config;
2832 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
2833
2834 // Find the suggested indentation ranges based on the syntax tree.
2835 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
2836 let end = Point::new(row_range.end, 0);
2837 let range = (start..end).to_offset(&self.text);
2838 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2839 Some(&grammar.indents_config.as_ref()?.query)
2840 });
2841 let indent_configs = matches
2842 .grammars()
2843 .iter()
2844 .map(|grammar| grammar.indents_config.as_ref().unwrap())
2845 .collect::<Vec<_>>();
2846
2847 let mut indent_ranges = Vec::<Range<Point>>::new();
2848 let mut outdent_positions = Vec::<Point>::new();
2849 while let Some(mat) = matches.peek() {
2850 let mut start: Option<Point> = None;
2851 let mut end: Option<Point> = None;
2852
2853 let config = &indent_configs[mat.grammar_index];
2854 for capture in mat.captures {
2855 if capture.index == config.indent_capture_ix {
2856 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
2857 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
2858 } else if Some(capture.index) == config.start_capture_ix {
2859 start = Some(Point::from_ts_point(capture.node.end_position()));
2860 } else if Some(capture.index) == config.end_capture_ix {
2861 end = Some(Point::from_ts_point(capture.node.start_position()));
2862 } else if Some(capture.index) == config.outdent_capture_ix {
2863 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
2864 }
2865 }
2866
2867 matches.advance();
2868 if let Some((start, end)) = start.zip(end) {
2869 if start.row == end.row {
2870 continue;
2871 }
2872
2873 let range = start..end;
2874 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
2875 Err(ix) => indent_ranges.insert(ix, range),
2876 Ok(ix) => {
2877 let prev_range = &mut indent_ranges[ix];
2878 prev_range.end = prev_range.end.max(range.end);
2879 }
2880 }
2881 }
2882 }
2883
2884 let mut error_ranges = Vec::<Range<Point>>::new();
2885 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
2886 grammar.error_query.as_ref()
2887 });
2888 while let Some(mat) = matches.peek() {
2889 let node = mat.captures[0].node;
2890 let start = Point::from_ts_point(node.start_position());
2891 let end = Point::from_ts_point(node.end_position());
2892 let range = start..end;
2893 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
2894 Ok(ix) | Err(ix) => ix,
2895 };
2896 let mut end_ix = ix;
2897 while let Some(existing_range) = error_ranges.get(end_ix) {
2898 if existing_range.end < end {
2899 end_ix += 1;
2900 } else {
2901 break;
2902 }
2903 }
2904 error_ranges.splice(ix..end_ix, [range]);
2905 matches.advance();
2906 }
2907
2908 outdent_positions.sort();
2909 for outdent_position in outdent_positions {
2910 // find the innermost indent range containing this outdent_position
2911 // set its end to the outdent position
2912 if let Some(range_to_truncate) = indent_ranges
2913 .iter_mut()
2914 .filter(|indent_range| indent_range.contains(&outdent_position))
2915 .next_back()
2916 {
2917 range_to_truncate.end = outdent_position;
2918 }
2919 }
2920
2921        // Find the suggested indentation increases and decreases based on regexes.
2922 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
2923 self.for_each_line(
2924 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
2925 ..Point::new(row_range.end, 0),
2926 |row, line| {
2927 if config
2928 .decrease_indent_pattern
2929 .as_ref()
2930 .map_or(false, |regex| regex.is_match(line))
2931 {
2932 indent_change_rows.push((row, Ordering::Less));
2933 }
2934 if config
2935 .increase_indent_pattern
2936 .as_ref()
2937 .map_or(false, |regex| regex.is_match(line))
2938 {
2939 indent_change_rows.push((row + 1, Ordering::Greater));
2940 }
2941 },
2942 );
2943
2944 let mut indent_changes = indent_change_rows.into_iter().peekable();
2945 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
2946 prev_non_blank_row.unwrap_or(0)
2947 } else {
2948 row_range.start.saturating_sub(1)
2949 };
2950 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
2951 Some(row_range.map(move |row| {
2952 let row_start = Point::new(row, self.indent_size_for_line(row).len);
2953
2954 let mut indent_from_prev_row = false;
2955 let mut outdent_from_prev_row = false;
2956 let mut outdent_to_row = u32::MAX;
2957 let mut from_regex = false;
2958
2959 while let Some((indent_row, delta)) = indent_changes.peek() {
2960 match indent_row.cmp(&row) {
2961 Ordering::Equal => match delta {
2962 Ordering::Less => {
2963 from_regex = true;
2964 outdent_from_prev_row = true
2965 }
2966 Ordering::Greater => {
2967 indent_from_prev_row = true;
2968 from_regex = true
2969 }
2970 _ => {}
2971 },
2972
2973 Ordering::Greater => break,
2974 Ordering::Less => {}
2975 }
2976
2977 indent_changes.next();
2978 }
2979
2980 for range in &indent_ranges {
2981 if range.start.row >= row {
2982 break;
2983 }
2984 if range.start.row == prev_row && range.end > row_start {
2985 indent_from_prev_row = true;
2986 }
2987 if range.end > prev_row_start && range.end <= row_start {
2988 outdent_to_row = outdent_to_row.min(range.start.row);
2989 }
2990 }
2991
2992 let within_error = error_ranges
2993 .iter()
2994 .any(|e| e.start.row < row && e.end > row_start);
2995
2996 let suggestion = if outdent_to_row == prev_row
2997 || (outdent_from_prev_row && indent_from_prev_row)
2998 {
2999 Some(IndentSuggestion {
3000 basis_row: prev_row,
3001 delta: Ordering::Equal,
3002 within_error: within_error && !from_regex,
3003 })
3004 } else if indent_from_prev_row {
3005 Some(IndentSuggestion {
3006 basis_row: prev_row,
3007 delta: Ordering::Greater,
3008 within_error: within_error && !from_regex,
3009 })
3010 } else if outdent_to_row < prev_row {
3011 Some(IndentSuggestion {
3012 basis_row: outdent_to_row,
3013 delta: Ordering::Equal,
3014 within_error: within_error && !from_regex,
3015 })
3016 } else if outdent_from_prev_row {
3017 Some(IndentSuggestion {
3018 basis_row: prev_row,
3019 delta: Ordering::Less,
3020 within_error: within_error && !from_regex,
3021 })
3022 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3023 {
3024 Some(IndentSuggestion {
3025 basis_row: prev_row,
3026 delta: Ordering::Equal,
3027 within_error: within_error && !from_regex,
3028 })
3029 } else {
3030 None
3031 };
3032
3033 prev_row = row;
3034 prev_row_start = row_start;
3035 suggestion
3036 }))
3037 }
3038
3039 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3040 while row > 0 {
3041 row -= 1;
3042 if !self.is_line_blank(row) {
3043 return Some(row);
3044 }
3045 }
3046 None
3047 }
3048
3049 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures, Vec<HighlightMap>) {
3050 let captures = self.syntax.captures(range, &self.text, |grammar| {
3051 grammar.highlights_query.as_ref()
3052 });
3053 let highlight_maps = captures
3054 .grammars()
3055 .iter()
3056 .map(|grammar| grammar.highlight_map())
3057 .collect();
3058 (captures, highlight_maps)
3059 }
3060
3061 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3062 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3063 /// returned in chunks where each chunk has a single syntax highlighting style and
3064 /// diagnostic status.
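    ///
    /// Sketch (assumes a `BufferSnapshot` named `snapshot`):
    ///
    /// ```ignore
    /// // Reassemble the buffer's text from its chunks, ignoring highlights.
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len(), false)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// ```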
3065 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks {
3066 let range = range.start.to_offset(self)..range.end.to_offset(self);
3067
3068 let mut syntax = None;
3069 if language_aware {
3070 syntax = Some(self.get_highlights(range.clone()));
3071 }
3072 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3073 let diagnostics = language_aware;
3074 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3075 }
3076
3077 pub fn highlighted_text_for_range<T: ToOffset>(
3078 &self,
3079 range: Range<T>,
3080 override_style: Option<HighlightStyle>,
3081 syntax_theme: &SyntaxTheme,
3082 ) -> HighlightedText {
3083 HighlightedText::from_buffer_range(
3084 range,
3085 &self.text,
3086 &self.syntax,
3087 override_style,
3088 syntax_theme,
3089 )
3090 }
3091
3092 /// Invokes the given callback for each line of text in the given range of the buffer.
3093    /// Uses a callback to avoid allocating a string for each line.
3094 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3095 let mut line = String::new();
3096 let mut row = range.start.row;
3097 for chunk in self
3098 .as_rope()
3099 .chunks_in_range(range.to_offset(self))
3100 .chain(["\n"])
3101 {
3102 for (newline_ix, text) in chunk.split('\n').enumerate() {
3103 if newline_ix > 0 {
3104 callback(row, &line);
3105 row += 1;
3106 line.clear();
3107 }
3108 line.push_str(text);
3109 }
3110 }
3111 }
3112
3113 /// Iterates over every [`SyntaxLayer`] in the buffer.
3114 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer> + '_ {
3115 self.syntax
3116 .layers_for_range(0..self.len(), &self.text, true)
3117 }
3118
3119 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer> {
3120 let offset = position.to_offset(self);
3121 self.syntax
3122 .layers_for_range(offset..offset, &self.text, false)
3123 .filter(|l| l.node().end_byte() > offset)
3124 .last()
3125 }
3126
3127 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3128 &self,
3129 range: Range<D>,
3130 ) -> Option<SyntaxLayer> {
3131 let range = range.to_offset(self);
3132 return self
3133 .syntax
3134 .layers_for_range(range, &self.text, false)
3135 .max_by(|a, b| {
3136 if a.depth != b.depth {
3137 a.depth.cmp(&b.depth)
3138 } else if a.offset.0 != b.offset.0 {
3139 a.offset.0.cmp(&b.offset.0)
3140 } else {
3141 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3142 }
3143 });
3144 }
3145
3146 /// Returns the main [`Language`].
3147 pub fn language(&self) -> Option<&Arc<Language>> {
3148 self.language.as_ref()
3149 }
3150
3151 /// Returns the [`Language`] at the given location.
3152 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3153 self.syntax_layer_at(position)
3154 .map(|info| info.language)
3155 .or(self.language.as_ref())
3156 }
3157
3158 /// Returns the settings for the language at the given location.
3159 pub fn settings_at<'a, D: ToOffset>(
3160 &'a self,
3161 position: D,
3162 cx: &'a App,
3163 ) -> Cow<'a, LanguageSettings> {
3164 language_settings(
3165 self.language_at(position).map(|l| l.name()),
3166 self.file.as_ref(),
3167 cx,
3168 )
3169 }
3170
3171 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3172 CharClassifier::new(self.language_scope_at(point))
3173 }
3174
3175 /// Returns the [`LanguageScope`] at the given location.
3176 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3177 let offset = position.to_offset(self);
3178 let mut scope = None;
3179 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3180
3181 // Use the layer that has the smallest node intersecting the given point.
3182 for layer in self
3183 .syntax
3184 .layers_for_range(offset..offset, &self.text, false)
3185 {
3186 let mut cursor = layer.node().walk();
3187
3188 let mut range = None;
3189 loop {
3190 let child_range = cursor.node().byte_range();
3191 if !child_range.contains(&offset) {
3192 break;
3193 }
3194
3195 range = Some(child_range);
3196 if cursor.goto_first_child_for_byte(offset).is_none() {
3197 break;
3198 }
3199 }
3200
3201 if let Some(range) = range {
3202 if smallest_range_and_depth.as_ref().map_or(
3203 true,
3204 |(smallest_range, smallest_range_depth)| {
3205 if layer.depth > *smallest_range_depth {
3206 true
3207 } else if layer.depth == *smallest_range_depth {
3208 range.len() < smallest_range.len()
3209 } else {
3210 false
3211 }
3212 },
3213 ) {
3214 smallest_range_and_depth = Some((range, layer.depth));
3215 scope = Some(LanguageScope {
3216 language: layer.language.clone(),
3217 override_id: layer.override_id(offset, &self.text),
3218 });
3219 }
3220 }
3221 }
3222
3223 scope.or_else(|| {
3224 self.language.clone().map(|language| LanguageScope {
3225 language,
3226 override_id: None,
3227 })
3228 })
3229 }
3230
3231 /// Returns a tuple of the range and character kind of the word
3232 /// surrounding the given position.
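    ///
    /// Sketch (assumes a snapshot whose text is `"hello world"` and that
    /// `CharKind::Word` is the word-character kind):
    ///
    /// ```ignore
    /// // An offset inside "hello" yields the byte range of that word.
    /// let (range, kind) = snapshot.surrounding_word(2);
    /// assert_eq!(range, 0..5);
    /// assert_eq!(kind, Some(CharKind::Word));
    /// ```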
3233 pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
3234 let mut start = start.to_offset(self);
3235 let mut end = start;
3236 let mut next_chars = self.chars_at(start).peekable();
3237 let mut prev_chars = self.reversed_chars_at(start).peekable();
3238
3239 let classifier = self.char_classifier_at(start);
3240 let word_kind = cmp::max(
3241 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3242 next_chars.peek().copied().map(|c| classifier.kind(c)),
3243 );
3244
3245 for ch in prev_chars {
3246 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3247 start -= ch.len_utf8();
3248 } else {
3249 break;
3250 }
3251 }
3252
3253 for ch in next_chars {
3254 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3255 end += ch.len_utf8();
3256 } else {
3257 break;
3258 }
3259 }
3260
3261 (start..end, word_kind)
3262 }
3263
3264 /// Returns the closest syntax node enclosing the given range.
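    ///
    /// Sketch (assumes a snapshot with a parsed syntax tree):
    ///
    /// ```ignore
    /// // Find the smallest node strictly containing a selection, e.g. to
    /// // implement an "expand selection" command.
    /// if let Some(node) = snapshot.syntax_ancestor(4..10) {
    ///     let _expanded_range = node.byte_range();
    /// }
    /// ```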
3265 pub fn syntax_ancestor<'a, T: ToOffset>(
3266 &'a self,
3267 range: Range<T>,
3268 ) -> Option<tree_sitter::Node<'a>> {
3269 let range = range.start.to_offset(self)..range.end.to_offset(self);
3270 let mut result: Option<tree_sitter::Node<'a>> = None;
3271 'outer: for layer in self
3272 .syntax
3273 .layers_for_range(range.clone(), &self.text, true)
3274 {
3275 let mut cursor = layer.node().walk();
3276
3277 // Descend to the first leaf that touches the start of the range,
3278 // and if the range is non-empty, extends beyond the start.
3279 while cursor.goto_first_child_for_byte(range.start).is_some() {
3280 if !range.is_empty() && cursor.node().end_byte() == range.start {
3281 cursor.goto_next_sibling();
3282 }
3283 }
3284
3285 // Ascend to the smallest ancestor that strictly contains the range.
3286 loop {
3287 let node_range = cursor.node().byte_range();
3288 if node_range.start <= range.start
3289 && node_range.end >= range.end
3290 && node_range.len() > range.len()
3291 {
3292 break;
3293 }
3294 if !cursor.goto_parent() {
3295 continue 'outer;
3296 }
3297 }
3298
3299 let left_node = cursor.node();
3300 let mut layer_result = left_node;
3301
3302 // For an empty range, try to find another node immediately to the right of the range.
3303 if left_node.end_byte() == range.start {
3304 let mut right_node = None;
3305 while !cursor.goto_next_sibling() {
3306 if !cursor.goto_parent() {
3307 break;
3308 }
3309 }
3310
3311 while cursor.node().start_byte() == range.start {
3312 right_node = Some(cursor.node());
3313 if !cursor.goto_first_child() {
3314 break;
3315 }
3316 }
3317
3318 // If there is a candidate node on both sides of the (empty) range, then
3319 // decide between the two by favoring a named node over an anonymous token.
3320 // If both nodes are the same in that regard, favor the right one.
3321 if let Some(right_node) = right_node {
3322 if right_node.is_named() || !left_node.is_named() {
3323 layer_result = right_node;
3324 }
3325 }
3326 }
3327
3328 if let Some(previous_result) = &result {
3329 if previous_result.byte_range().len() < layer_result.byte_range().len() {
3330 continue;
3331 }
3332 }
3333 result = Some(layer_result);
3334 }
3335
3336 result
3337 }
3338
3339 /// Returns the outline for the buffer.
3340 ///
3341 /// This method allows passing an optional [`SyntaxTheme`] to
3342 /// syntax-highlight the returned symbols.
3343 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
3344 self.outline_items_containing(0..self.len(), true, theme)
3345 .map(Outline::new)
3346 }
3347
3348 /// Returns all the symbols that contain the given position.
3349 ///
3350 /// This method allows passing an optional [`SyntaxTheme`] to
3351 /// syntax-highlight the returned symbols.
3352 pub fn symbols_containing<T: ToOffset>(
3353 &self,
3354 position: T,
3355 theme: Option<&SyntaxTheme>,
3356 ) -> Option<Vec<OutlineItem<Anchor>>> {
3357 let position = position.to_offset(self);
3358 let mut items = self.outline_items_containing(
3359 position.saturating_sub(1)..self.len().min(position + 1),
3360 false,
3361 theme,
3362 )?;
3363 let mut prev_depth = None;
3364 items.retain(|item| {
3365 let result = prev_depth.map_or(true, |prev_depth| item.depth > prev_depth);
3366 prev_depth = Some(item.depth);
3367 result
3368 });
3369 Some(items)
3370 }
3371
3372 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3373 let range = range.to_offset(self);
3374 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3375 grammar.outline_config.as_ref().map(|c| &c.query)
3376 });
3377 let configs = matches
3378 .grammars()
3379 .iter()
3380 .map(|g| g.outline_config.as_ref().unwrap())
3381 .collect::<Vec<_>>();
3382
3383 while let Some(mat) = matches.peek() {
3384 let config = &configs[mat.grammar_index];
3385 let containing_item_node = maybe!({
3386 let item_node = mat.captures.iter().find_map(|cap| {
3387 if cap.index == config.item_capture_ix {
3388 Some(cap.node)
3389 } else {
3390 None
3391 }
3392 })?;
3393
3394 let item_byte_range = item_node.byte_range();
3395 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3396 None
3397 } else {
3398 Some(item_node)
3399 }
3400 });
3401
3402 if let Some(item_node) = containing_item_node {
3403 return Some(
3404 Point::from_ts_point(item_node.start_position())
3405 ..Point::from_ts_point(item_node.end_position()),
3406 );
3407 }
3408
3409 matches.advance();
3410 }
3411 None
3412 }
3413
3414 pub fn outline_items_containing<T: ToOffset>(
3415 &self,
3416 range: Range<T>,
3417 include_extra_context: bool,
3418 theme: Option<&SyntaxTheme>,
3419 ) -> Option<Vec<OutlineItem<Anchor>>> {
3420 let range = range.to_offset(self);
3421 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3422 grammar.outline_config.as_ref().map(|c| &c.query)
3423 });
3424 let configs = matches
3425 .grammars()
3426 .iter()
3427 .map(|g| g.outline_config.as_ref().unwrap())
3428 .collect::<Vec<_>>();
3429
3430 let mut items = Vec::new();
3431 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3432 while let Some(mat) = matches.peek() {
3433 let config = &configs[mat.grammar_index];
3434 if let Some(item) =
3435 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3436 {
3437 items.push(item);
3438 } else if let Some(capture) = mat
3439 .captures
3440 .iter()
3441 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3442 {
3443 let capture_range = capture.node.start_position()..capture.node.end_position();
3444 let mut capture_row_range =
3445 capture_range.start.row as u32..capture_range.end.row as u32;
3446 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3447 {
3448 capture_row_range.end -= 1;
3449 }
3450 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3451 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3452 last_row_range.end = capture_row_range.end;
3453 } else {
3454 annotation_row_ranges.push(capture_row_range);
3455 }
3456 } else {
3457 annotation_row_ranges.push(capture_row_range);
3458 }
3459 }
3460 matches.advance();
3461 }
3462
3463 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3464
3465 // Assign depths based on containment relationships and convert to anchors.
3466 let mut item_ends_stack = Vec::<Point>::new();
3467 let mut anchor_items = Vec::new();
3468 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3469 for item in items {
3470 while let Some(last_end) = item_ends_stack.last().copied() {
3471 if last_end < item.range.end {
3472 item_ends_stack.pop();
3473 } else {
3474 break;
3475 }
3476 }
3477
3478 let mut annotation_row_range = None;
3479 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3480 let row_preceding_item = item.range.start.row.saturating_sub(1);
3481 if next_annotation_row_range.end < row_preceding_item {
3482 annotation_row_ranges.next();
3483 } else {
3484 if next_annotation_row_range.end == row_preceding_item {
3485 annotation_row_range = Some(next_annotation_row_range.clone());
3486 annotation_row_ranges.next();
3487 }
3488 break;
3489 }
3490 }
3491
3492 anchor_items.push(OutlineItem {
3493 depth: item_ends_stack.len(),
3494 range: self.anchor_after(item.range.start)..self.anchor_before(item.range.end),
3495 text: item.text,
3496 highlight_ranges: item.highlight_ranges,
3497 name_ranges: item.name_ranges,
3498 body_range: item.body_range.map(|body_range| {
3499 self.anchor_after(body_range.start)..self.anchor_before(body_range.end)
3500 }),
3501 annotation_range: annotation_row_range.map(|annotation_range| {
3502 self.anchor_after(Point::new(annotation_range.start, 0))
3503 ..self.anchor_before(Point::new(
3504 annotation_range.end,
3505 self.line_len(annotation_range.end),
3506 ))
3507 }),
3508 });
3509 item_ends_stack.push(item.range.end);
3510 }
3511
3512 Some(anchor_items)
3513 }
3514
3515 fn next_outline_item(
3516 &self,
3517 config: &OutlineConfig,
3518 mat: &SyntaxMapMatch,
3519 range: &Range<usize>,
3520 include_extra_context: bool,
3521 theme: Option<&SyntaxTheme>,
3522 ) -> Option<OutlineItem<Point>> {
3523 let item_node = mat.captures.iter().find_map(|cap| {
3524 if cap.index == config.item_capture_ix {
3525 Some(cap.node)
3526 } else {
3527 None
3528 }
3529 })?;
3530
3531 let item_byte_range = item_node.byte_range();
3532 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3533 return None;
3534 }
3535 let item_point_range = Point::from_ts_point(item_node.start_position())
3536 ..Point::from_ts_point(item_node.end_position());
3537
3538 let mut open_point = None;
3539 let mut close_point = None;
3540 let mut buffer_ranges = Vec::new();
3541 for capture in mat.captures {
3542 let node_is_name;
3543 if capture.index == config.name_capture_ix {
3544 node_is_name = true;
3545 } else if Some(capture.index) == config.context_capture_ix
3546 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
3547 {
3548 node_is_name = false;
3549 } else {
3550 if Some(capture.index) == config.open_capture_ix {
3551 open_point = Some(Point::from_ts_point(capture.node.end_position()));
3552 } else if Some(capture.index) == config.close_capture_ix {
3553 close_point = Some(Point::from_ts_point(capture.node.start_position()));
3554 }
3555
3556 continue;
3557 }
3558
3559 let mut range = capture.node.start_byte()..capture.node.end_byte();
3560 let start = capture.node.start_position();
3561 if capture.node.end_position().row > start.row {
3562 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
3563 }
3564
3565 if !range.is_empty() {
3566 buffer_ranges.push((range, node_is_name));
3567 }
3568 }
3569 if buffer_ranges.is_empty() {
3570 return None;
3571 }
3572 let mut text = String::new();
3573 let mut highlight_ranges = Vec::new();
3574 let mut name_ranges = Vec::new();
3575 let mut chunks = self.chunks(
3576 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end,
3577 true,
3578 );
3579 let mut last_buffer_range_end = 0;
3580
3581 for (buffer_range, is_name) in buffer_ranges {
3582 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
3583 if space_added {
3584 text.push(' ');
3585 }
3586 let before_append_len = text.len();
3587 let mut offset = buffer_range.start;
3588 chunks.seek(buffer_range.clone());
3589 for mut chunk in chunks.by_ref() {
3590 if chunk.text.len() > buffer_range.end - offset {
3591 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
3592 offset = buffer_range.end;
3593 } else {
3594 offset += chunk.text.len();
3595 }
3596 let style = chunk
3597 .syntax_highlight_id
3598 .zip(theme)
3599 .and_then(|(highlight, theme)| highlight.style(theme));
3600 if let Some(style) = style {
3601 let start = text.len();
3602 let end = start + chunk.text.len();
3603 highlight_ranges.push((start..end, style));
3604 }
3605 text.push_str(chunk.text);
3606 if offset >= buffer_range.end {
3607 break;
3608 }
3609 }
3610 if is_name {
3611 let after_append_len = text.len();
3612 let start = if space_added && !name_ranges.is_empty() {
3613 before_append_len - 1
3614 } else {
3615 before_append_len
3616 };
3617 name_ranges.push(start..after_append_len);
3618 }
3619 last_buffer_range_end = buffer_range.end;
3620 }
3621
3622 Some(OutlineItem {
3623 depth: 0, // We'll calculate the depth later
3624 range: item_point_range,
3625 text,
3626 highlight_ranges,
3627 name_ranges,
3628 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
3629 annotation_range: None,
3630 })
3631 }
3632
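    /// Returns the byte ranges of function bodies (`TextObject::InsideFunction`)
    /// that intersect the given range, e.g. for folding function bodies.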
3633 pub fn function_body_fold_ranges<T: ToOffset>(
3634 &self,
3635 within: Range<T>,
3636 ) -> impl Iterator<Item = Range<usize>> + '_ {
3637 self.text_object_ranges(within, TreeSitterOptions::default())
3638 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
3639 }
3640
3641    /// For each grammar in the buffer's syntax layers, runs the
3642    /// [`tree_sitter::Query`] selected by the given callback against the given range.
3643 pub fn matches(
3644 &self,
3645 range: Range<usize>,
3646 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
3647 ) -> SyntaxMapMatches {
3648 self.syntax.matches(range, self, query)
3649 }
3650
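    /// Returns all bracket pairs, from each applicable grammar's brackets query,
    /// whose combined open-to-close range overlaps the given range.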
3651 pub fn all_bracket_ranges(
3652 &self,
3653 range: Range<usize>,
3654 ) -> impl Iterator<Item = BracketMatch> + '_ {
3655 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3656 grammar.brackets_config.as_ref().map(|c| &c.query)
3657 });
3658 let configs = matches
3659 .grammars()
3660 .iter()
3661 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
3662 .collect::<Vec<_>>();
3663
3664 iter::from_fn(move || {
3665 while let Some(mat) = matches.peek() {
3666 let mut open = None;
3667 let mut close = None;
3668 let config = &configs[mat.grammar_index];
3669 let pattern = &config.patterns[mat.pattern_index];
3670 for capture in mat.captures {
3671 if capture.index == config.open_capture_ix {
3672 open = Some(capture.node.byte_range());
3673 } else if capture.index == config.close_capture_ix {
3674 close = Some(capture.node.byte_range());
3675 }
3676 }
3677
3678 matches.advance();
3679
3680 let Some((open_range, close_range)) = open.zip(close) else {
3681 continue;
3682 };
3683
3684 let bracket_range = open_range.start..=close_range.end;
3685 if !bracket_range.overlaps(&range) {
3686 continue;
3687 }
3688
3689 return Some(BracketMatch {
3690 open_range,
3691 close_range,
3692 newline_only: pattern.newline_only,
3693 });
3694 }
3695 None
3696 })
3697 }
3698
3699    /// Returns bracket range pairs overlapping or adjacent to `range`.
3700 pub fn bracket_ranges<T: ToOffset>(
3701 &self,
3702 range: Range<T>,
3703 ) -> impl Iterator<Item = BracketMatch> + '_ {
3704        // Expand the range by one character on each side so that bracket pairs adjacent to, or inclusively containing, the range are found.
3705 let range = range.start.to_offset(self).saturating_sub(1)
3706 ..self.len().min(range.end.to_offset(self) + 1);
3707 self.all_bracket_ranges(range)
3708 .filter(|pair| !pair.newline_only)
3709 }
3710
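    /// Returns the `(range, text object)` pairs captured by each grammar's text
    /// objects query that overlap the given range (expanded by one character on
    /// each side). Captures of the same text object within a single match are
    /// merged into one range.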
3711 pub fn text_object_ranges<T: ToOffset>(
3712 &self,
3713 range: Range<T>,
3714 options: TreeSitterOptions,
3715 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
3716 let range = range.start.to_offset(self).saturating_sub(1)
3717 ..self.len().min(range.end.to_offset(self) + 1);
3718
3719 let mut matches =
3720 self.syntax
3721 .matches_with_options(range.clone(), &self.text, options, |grammar| {
3722 grammar.text_object_config.as_ref().map(|c| &c.query)
3723 });
3724
3725 let configs = matches
3726 .grammars()
3727 .iter()
3728 .map(|grammar| grammar.text_object_config.as_ref())
3729 .collect::<Vec<_>>();
3730
3731 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
3732
3733 iter::from_fn(move || {
3734 loop {
3735 while let Some(capture) = captures.pop() {
3736 if capture.0.overlaps(&range) {
3737 return Some(capture);
3738 }
3739 }
3740
3741 let mat = matches.peek()?;
3742
3743 let Some(config) = configs[mat.grammar_index].as_ref() else {
3744 matches.advance();
3745 continue;
3746 };
3747
3748 for capture in mat.captures {
3749 let Some(ix) = config
3750 .text_objects_by_capture_ix
3751 .binary_search_by_key(&capture.index, |e| e.0)
3752 .ok()
3753 else {
3754 continue;
3755 };
3756 let text_object = config.text_objects_by_capture_ix[ix].1;
3757 let byte_range = capture.node.byte_range();
3758
3759 let mut found = false;
3760 for (range, existing) in captures.iter_mut() {
3761 if existing == &text_object {
3762 range.start = range.start.min(byte_range.start);
3763 range.end = range.end.max(byte_range.end);
3764 found = true;
3765 break;
3766 }
3767 }
3768
3769 if !found {
3770 captures.push((byte_range, text_object));
3771 }
3772 }
3773
3774 matches.advance();
3775 }
3776 })
3777 }
3778
3779 /// Returns enclosing bracket ranges containing the given range
3780 pub fn enclosing_bracket_ranges<T: ToOffset>(
3781 &self,
3782 range: Range<T>,
3783 ) -> impl Iterator<Item = BracketMatch> + '_ {
3784 let range = range.start.to_offset(self)..range.end.to_offset(self);
3785
3786 self.bracket_ranges(range.clone()).filter(move |pair| {
3787 pair.open_range.start <= range.start && pair.close_range.end >= range.end
3788 })
3789 }
3790
3791    /// Returns the smallest enclosing pair of bracket ranges containing the given range, or `None` if no brackets contain it.
3792    ///
3793    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
3794 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
3795 &self,
3796 range: Range<T>,
3797 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
3798 ) -> Option<(Range<usize>, Range<usize>)> {
3799 let range = range.start.to_offset(self)..range.end.to_offset(self);
3800
3801 // Get the ranges of the innermost pair of brackets.
3802 let mut result: Option<(Range<usize>, Range<usize>)> = None;
3803
3804 for pair in self.enclosing_bracket_ranges(range.clone()) {
3805 if let Some(range_filter) = range_filter {
3806 if !range_filter(pair.open_range.clone(), pair.close_range.clone()) {
3807 continue;
3808 }
3809 }
3810
3811 let len = pair.close_range.end - pair.open_range.start;
3812
3813 if let Some((existing_open, existing_close)) = &result {
3814 let existing_len = existing_close.end - existing_open.start;
3815 if len > existing_len {
3816 continue;
3817 }
3818 }
3819
3820 result = Some((pair.open_range, pair.close_range));
3821 }
3822
3823 result
3824 }
3825
3826    /// Returns the byte offset ranges of any matches of the redaction query.
3827 /// The buffer can be associated with multiple languages, and the redaction query associated with each
3828 /// will be run on the relevant section of the buffer.
3829 pub fn redacted_ranges<T: ToOffset>(
3830 &self,
3831 range: Range<T>,
3832 ) -> impl Iterator<Item = Range<usize>> + '_ {
3833 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3834 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3835 grammar
3836 .redactions_config
3837 .as_ref()
3838 .map(|config| &config.query)
3839 });
3840
3841 let configs = syntax_matches
3842 .grammars()
3843 .iter()
3844 .map(|grammar| grammar.redactions_config.as_ref())
3845 .collect::<Vec<_>>();
3846
3847 iter::from_fn(move || {
3848 let redacted_range = syntax_matches
3849 .peek()
3850 .and_then(|mat| {
3851 configs[mat.grammar_index].and_then(|config| {
3852 mat.captures
3853 .iter()
3854 .find(|capture| capture.index == config.redaction_capture_ix)
3855 })
3856 })
3857 .map(|mat| mat.node.byte_range());
3858 syntax_matches.advance();
3859 redacted_range
3860 })
3861 }
3862
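    /// Returns the ranges of language injections that intersect the given range,
    /// along with the language injected into each of them.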
3863 pub fn injections_intersecting_range<T: ToOffset>(
3864 &self,
3865 range: Range<T>,
3866 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
3867 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
3868
3869 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3870 grammar
3871 .injection_config
3872 .as_ref()
3873 .map(|config| &config.query)
3874 });
3875
3876 let configs = syntax_matches
3877 .grammars()
3878 .iter()
3879 .map(|grammar| grammar.injection_config.as_ref())
3880 .collect::<Vec<_>>();
3881
3882 iter::from_fn(move || {
3883 let ranges = syntax_matches.peek().and_then(|mat| {
3884 let config = &configs[mat.grammar_index]?;
3885 let content_capture_range = mat.captures.iter().find_map(|capture| {
3886 if capture.index == config.content_capture_ix {
3887 Some(capture.node.byte_range())
3888 } else {
3889 None
3890 }
3891 })?;
3892 let language = self.language_at(content_capture_range.start)?;
3893 Some((content_capture_range, language))
3894 });
3895 syntax_matches.advance();
3896 ranges
3897 })
3898 }
3899
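    /// Returns the ranges captured as variables by each grammar's debug variables
    /// query within the given offset range.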
3900 pub fn debug_variable_ranges(
3901 &self,
3902 offset_range: Range<usize>,
3903 ) -> impl Iterator<Item = DebugVariableRanges> + '_ {
3904 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3905 grammar
3906 .debug_variables_config
3907 .as_ref()
3908 .map(|config| &config.query)
3909 });
3910
3911 let configs = syntax_matches
3912 .grammars()
3913 .iter()
3914 .map(|grammar| grammar.debug_variables_config.as_ref())
3915 .collect::<Vec<_>>();
3916
3917 iter::from_fn(move || {
3918 loop {
3919 let mat = syntax_matches.peek()?;
3920
3921 let variable_ranges = configs[mat.grammar_index].and_then(|config| {
3922 let full_range = mat.captures.iter().fold(
3923 Range {
3924 start: usize::MAX,
3925 end: 0,
3926 },
3927 |mut acc, next| {
3928 let byte_range = next.node.byte_range();
3929 if acc.start > byte_range.start {
3930 acc.start = byte_range.start;
3931 }
3932 if acc.end < byte_range.end {
3933 acc.end = byte_range.end;
3934 }
3935 acc
3936 },
3937 );
3938 if full_range.start > full_range.end {
3939                        // There were no captures, so there is no spanning range for this match.
3940 return None;
3941 }
3942
3943 let captures = mat.captures.iter().filter_map(|capture| {
3944 Some((
3945 capture,
3946 config.captures.get(capture.index as usize).cloned()?,
3947 ))
3948 });
3949
3950 let mut variable_range = None;
3951 for (query, capture) in captures {
3952 if let DebugVariableCapture::Variable = capture {
3953 let _ = variable_range.insert(query.node.byte_range());
3954 }
3955 }
3956
3957 Some(DebugVariableRanges {
3958 buffer_id: self.remote_id(),
3959 range: variable_range?,
3960 })
3961 });
3962
3963 syntax_matches.advance();
3964 if variable_ranges.is_some() {
3965                    // It's fine to short-circuit when `.peek()` returns `None`. We only keep looping (instead of ending
3966                    // the iterator) when a match did not contain a variable capture, so we just move on to the next match.
3967 return variable_ranges;
3968 }
3969 }
3970 })
3971 }
3972
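    /// Returns the runnables detected by each grammar's runnables query within the
    /// given offset range, including each runnable's run range, tags, and any extra
    /// captured text.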
3973 pub fn runnable_ranges(
3974 &self,
3975 offset_range: Range<usize>,
3976 ) -> impl Iterator<Item = RunnableRange> + '_ {
3977 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
3978 grammar.runnable_config.as_ref().map(|config| &config.query)
3979 });
3980
3981 let test_configs = syntax_matches
3982 .grammars()
3983 .iter()
3984 .map(|grammar| grammar.runnable_config.as_ref())
3985 .collect::<Vec<_>>();
3986
3987 iter::from_fn(move || {
3988 loop {
3989 let mat = syntax_matches.peek()?;
3990
3991 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
3992 let mut run_range = None;
3993 let full_range = mat.captures.iter().fold(
3994 Range {
3995 start: usize::MAX,
3996 end: 0,
3997 },
3998 |mut acc, next| {
3999 let byte_range = next.node.byte_range();
4000 if acc.start > byte_range.start {
4001 acc.start = byte_range.start;
4002 }
4003 if acc.end < byte_range.end {
4004 acc.end = byte_range.end;
4005 }
4006 acc
4007 },
4008 );
4009 if full_range.start > full_range.end {
4010                        // There were no captures, so there is no spanning range for this match.
4011 return None;
4012 }
4013 let extra_captures: SmallVec<[_; 1]> =
4014 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4015 test_configs
4016 .extra_captures
4017 .get(capture.index as usize)
4018 .cloned()
4019 .and_then(|tag_name| match tag_name {
4020 RunnableCapture::Named(name) => {
4021 Some((capture.node.byte_range(), name))
4022 }
4023 RunnableCapture::Run => {
4024 let _ = run_range.insert(capture.node.byte_range());
4025 None
4026 }
4027 })
4028 }));
4029 let run_range = run_range?;
4030 let tags = test_configs
4031 .query
4032 .property_settings(mat.pattern_index)
4033 .iter()
4034 .filter_map(|property| {
4035 if *property.key == *"tag" {
4036 property
4037 .value
4038 .as_ref()
4039 .map(|value| RunnableTag(value.to_string().into()))
4040 } else {
4041 None
4042 }
4043 })
4044 .collect();
4045 let extra_captures = extra_captures
4046 .into_iter()
4047 .map(|(range, name)| {
4048 (
4049 name.to_string(),
4050 self.text_for_range(range.clone()).collect::<String>(),
4051 )
4052 })
4053 .collect();
4054 // All tags should have the same range.
4055 Some(RunnableRange {
4056 run_range,
4057 full_range,
4058 runnable: Runnable {
4059 tags,
4060 language: mat.language,
4061 buffer: self.remote_id(),
4062 },
4063 extra_captures,
4064 buffer_id: self.remote_id(),
4065 })
4066 });
4067
4068 syntax_matches.advance();
4069 if test_range.is_some() {
4070                    // It's fine to short-circuit when `.peek()` returns `None`. We only keep looping (instead of ending
4071                    // the iterator) when a match did not contain a run capture, so we just move on to the next match.
4072 return test_range;
4073 }
4074 }
4075 })
4076 }
4077
4078    /// Returns the selections of all replicas that intersect the given range, optionally including the local replica's.
4079 #[allow(clippy::type_complexity)]
4080 pub fn selections_in_range(
4081 &self,
4082 range: Range<Anchor>,
4083 include_local: bool,
4084 ) -> impl Iterator<
4085 Item = (
4086 ReplicaId,
4087 bool,
4088 CursorShape,
4089 impl Iterator<Item = &Selection<Anchor>> + '_,
4090 ),
4091 > + '_ {
4092 self.remote_selections
4093 .iter()
4094 .filter(move |(replica_id, set)| {
4095 (include_local || **replica_id != self.text.replica_id())
4096 && !set.selections.is_empty()
4097 })
4098 .map(move |(replica_id, set)| {
4099 let start_ix = match set.selections.binary_search_by(|probe| {
4100 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4101 }) {
4102 Ok(ix) | Err(ix) => ix,
4103 };
4104 let end_ix = match set.selections.binary_search_by(|probe| {
4105 probe.start.cmp(&range.end, self).then(Ordering::Less)
4106 }) {
4107 Ok(ix) | Err(ix) => ix,
4108 };
4109
4110 (
4111 *replica_id,
4112 set.line_mode,
4113 set.cursor_shape,
4114 set.selections[start_ix..end_ix].iter(),
4115 )
4116 })
4117 }
4118
4119    /// Returns whether the buffer contains any diagnostics.
4120 pub fn has_diagnostics(&self) -> bool {
4121 !self.diagnostics.is_empty()
4122 }
4123
4124 /// Returns all the diagnostics intersecting the given range.
4125 pub fn diagnostics_in_range<'a, T, O>(
4126 &'a self,
4127 search_range: Range<T>,
4128 reversed: bool,
4129 ) -> impl 'a + Iterator<Item = DiagnosticEntry<O>>
4130 where
4131 T: 'a + Clone + ToOffset,
4132 O: 'a + FromAnchor,
4133 {
4134 let mut iterators: Vec<_> = self
4135 .diagnostics
4136 .iter()
4137 .map(|(_, collection)| {
4138 collection
4139 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4140 .peekable()
4141 })
4142 .collect();
4143
4144 std::iter::from_fn(move || {
4145 let (next_ix, _) = iterators
4146 .iter_mut()
4147 .enumerate()
4148 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4149 .min_by(|(_, a), (_, b)| {
4150 let cmp = a
4151 .range
4152 .start
4153 .cmp(&b.range.start, self)
4154 // when range is equal, sort by diagnostic severity
4155 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4156 // and stabilize order with group_id
4157 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4158 if reversed { cmp.reverse() } else { cmp }
4159 })?;
4160 iterators[next_ix]
4161 .next()
4162 .map(|DiagnosticEntry { range, diagnostic }| DiagnosticEntry {
4163 diagnostic,
4164 range: FromAnchor::from_anchor(&range.start, self)
4165 ..FromAnchor::from_anchor(&range.end, self),
4166 })
4167 })
4168 }
4169
4170 /// Returns all the diagnostic groups associated with the given
4171 /// language server ID. If no language server ID is provided,
4172    /// all diagnostic groups are returned.
4173 pub fn diagnostic_groups(
4174 &self,
4175 language_server_id: Option<LanguageServerId>,
4176 ) -> Vec<(LanguageServerId, DiagnosticGroup<Anchor>)> {
4177 let mut groups = Vec::new();
4178
4179 if let Some(language_server_id) = language_server_id {
4180 if let Ok(ix) = self
4181 .diagnostics
4182 .binary_search_by_key(&language_server_id, |e| e.0)
4183 {
4184 self.diagnostics[ix]
4185 .1
4186 .groups(language_server_id, &mut groups, self);
4187 }
4188 } else {
4189 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4190 diagnostics.groups(*language_server_id, &mut groups, self);
4191 }
4192 }
4193
4194 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4195 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4196 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4197 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4198 });
4199
4200 groups
4201 }
4202
4203 /// Returns an iterator over the diagnostics for the given group.
4204 pub fn diagnostic_group<O>(
4205 &self,
4206 group_id: usize,
4207 ) -> impl Iterator<Item = DiagnosticEntry<O>> + '_
4208 where
4209 O: FromAnchor + 'static,
4210 {
4211 self.diagnostics
4212 .iter()
4213 .flat_map(move |(_, set)| set.group(group_id, self))
4214 }
4215
4216 /// An integer version number that accounts for all updates besides
4217 /// the buffer's text itself (which is versioned via a version vector).
4218 pub fn non_text_state_update_count(&self) -> usize {
4219 self.non_text_state_update_count
4220 }
4221
4222    /// Returns a snapshot of the underlying file.
4223 pub fn file(&self) -> Option<&Arc<dyn File>> {
4224 self.file.as_ref()
4225 }
4226
4227    /// Resolves the path associated with the underlying file, either including the worktree root (when `include_root` is true) or relative to it.
4228 pub fn resolve_file_path(&self, cx: &App, include_root: bool) -> Option<PathBuf> {
4229 if let Some(file) = self.file() {
4230 if file.path().file_name().is_none() || include_root {
4231 Some(file.full_path(cx))
4232 } else {
4233 Some(file.path().to_path_buf())
4234 }
4235 } else {
4236 None
4237 }
4238 }
4239
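    /// Collects the distinct words in the given buffer range, mapping each word's
    /// text to its anchor range. When `fuzzy_contents` is set, only words containing
    /// all of its characters (in order, case-insensitively) are returned, and when
    /// `skip_digits` is set, words starting with a digit are skipped.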
4240 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4241 let query_str = query.fuzzy_contents;
4242 if query_str.map_or(false, |query| query.is_empty()) {
4243 return BTreeMap::default();
4244 }
4245
4246 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4247 language,
4248 override_id: None,
4249 }));
4250
4251 let mut query_ix = 0;
4252 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4253 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4254
4255 let mut words = BTreeMap::default();
4256 let mut current_word_start_ix = None;
4257 let mut chunk_ix = query.range.start;
4258 for chunk in self.chunks(query.range, false) {
4259 for (i, c) in chunk.text.char_indices() {
4260 let ix = chunk_ix + i;
4261 if classifier.is_word(c) {
4262 if current_word_start_ix.is_none() {
4263 current_word_start_ix = Some(ix);
4264 }
4265
4266 if let Some(query_chars) = &query_chars {
4267 if query_ix < query_len {
4268 if c.to_lowercase().eq(query_chars[query_ix].to_lowercase()) {
4269 query_ix += 1;
4270 }
4271 }
4272 }
4273 continue;
4274 } else if let Some(word_start) = current_word_start_ix.take() {
4275 if query_ix == query_len {
4276 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4277 let mut word_text = self.text_for_range(word_start..ix).peekable();
4278 let first_char = word_text
4279 .peek()
4280 .and_then(|first_chunk| first_chunk.chars().next());
4281                        // As a heuristic to reduce useless completions, optionally skip "words" that start with a digit.
4282 if !query.skip_digits
4283 || first_char.map_or(true, |first_char| !first_char.is_digit(10))
4284 {
4285 words.insert(word_text.collect(), word_range);
4286 }
4287 }
4288 }
4289 query_ix = 0;
4290 }
4291 chunk_ix += chunk.text.len();
4292 }
4293
4294 words
4295 }
4296}
4297
4298pub struct WordsQuery<'a> {
4299    /// If set, only return words that contain all of these characters, in order (case-insensitively).
4300 pub fuzzy_contents: Option<&'a str>,
4301 /// Skips words that start with a digit.
4302 pub skip_digits: bool,
4303    /// The buffer offset range in which to look for words.
4304 pub range: Range<usize>,
4305}
4306
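/// Returns the indentation at the start of the given row of the buffer.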
4307fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4308 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4309}
4310
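/// Measures the indentation of the given character stream: the length of the leading
/// run of spaces and tabs, with the kind taken from the first indent character.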
4311fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4312 let mut result = IndentSize::spaces(0);
4313 for c in text {
4314 let kind = match c {
4315 ' ' => IndentKind::Space,
4316 '\t' => IndentKind::Tab,
4317 _ => break,
4318 };
4319 if result.len == 0 {
4320 result.kind = kind;
4321 }
4322 result.len += 1;
4323 }
4324 result
4325}
4326
4327impl Clone for BufferSnapshot {
4328 fn clone(&self) -> Self {
4329 Self {
4330 text: self.text.clone(),
4331 syntax: self.syntax.clone(),
4332 file: self.file.clone(),
4333 remote_selections: self.remote_selections.clone(),
4334 diagnostics: self.diagnostics.clone(),
4335 language: self.language.clone(),
4336 non_text_state_update_count: self.non_text_state_update_count,
4337 }
4338 }
4339}
4340
4341impl Deref for BufferSnapshot {
4342 type Target = text::BufferSnapshot;
4343
4344 fn deref(&self) -> &Self::Target {
4345 &self.text
4346 }
4347}
4348
4349unsafe impl Send for BufferChunks<'_> {}
4350
4351impl<'a> BufferChunks<'a> {
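    /// Creates an iterator over the chunks of text in the given range, optionally
    /// annotating each chunk with syntax highlights and diagnostic severities.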
4352 pub(crate) fn new(
4353 text: &'a Rope,
4354 range: Range<usize>,
4355 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4356 diagnostics: bool,
4357 buffer_snapshot: Option<&'a BufferSnapshot>,
4358 ) -> Self {
4359 let mut highlights = None;
4360 if let Some((captures, highlight_maps)) = syntax {
4361 highlights = Some(BufferChunkHighlights {
4362 captures,
4363 next_capture: None,
4364 stack: Default::default(),
4365 highlight_maps,
4366 })
4367 }
4368
4369 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4370 let chunks = text.chunks_in_range(range.clone());
4371
4372 let mut this = BufferChunks {
4373 range,
4374 buffer_snapshot,
4375 chunks,
4376 diagnostic_endpoints,
4377 error_depth: 0,
4378 warning_depth: 0,
4379 information_depth: 0,
4380 hint_depth: 0,
4381 unnecessary_depth: 0,
4382 highlights,
4383 };
4384 this.initialize_diagnostic_endpoints();
4385 this
4386 }
4387
4388    /// Seeks to the given byte range in the buffer.
4389 pub fn seek(&mut self, range: Range<usize>) {
4390 let old_range = std::mem::replace(&mut self.range, range.clone());
4391 self.chunks.set_range(self.range.clone());
4392 if let Some(highlights) = self.highlights.as_mut() {
4393 if old_range.start <= self.range.start && old_range.end >= self.range.end {
4394 // Reuse existing highlights stack, as the new range is a subrange of the old one.
4395 highlights
4396 .stack
4397 .retain(|(end_offset, _)| *end_offset > range.start);
4398 if let Some(capture) = &highlights.next_capture {
4399 if range.start >= capture.node.start_byte() {
4400 let next_capture_end = capture.node.end_byte();
4401 if range.start < next_capture_end {
4402 highlights.stack.push((
4403 next_capture_end,
4404 highlights.highlight_maps[capture.grammar_index].get(capture.index),
4405 ));
4406 }
4407 highlights.next_capture.take();
4408 }
4409 }
4410 } else if let Some(snapshot) = self.buffer_snapshot {
4411 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
4412 *highlights = BufferChunkHighlights {
4413 captures,
4414 next_capture: None,
4415 stack: Default::default(),
4416 highlight_maps,
4417 };
4418 } else {
4419 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
4420 // Seeking such BufferChunks is not supported.
4421 debug_assert!(
4422 false,
4423 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
4424 );
4425 }
4426
4427 highlights.captures.set_byte_range(self.range.clone());
4428 self.initialize_diagnostic_endpoints();
4429 }
4430 }
4431
4432 fn initialize_diagnostic_endpoints(&mut self) {
4433 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut() {
4434 if let Some(buffer) = self.buffer_snapshot {
4435 let mut diagnostic_endpoints = Vec::new();
4436 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
4437 diagnostic_endpoints.push(DiagnosticEndpoint {
4438 offset: entry.range.start,
4439 is_start: true,
4440 severity: entry.diagnostic.severity,
4441 is_unnecessary: entry.diagnostic.is_unnecessary,
4442 });
4443 diagnostic_endpoints.push(DiagnosticEndpoint {
4444 offset: entry.range.end,
4445 is_start: false,
4446 severity: entry.diagnostic.severity,
4447 is_unnecessary: entry.diagnostic.is_unnecessary,
4448 });
4449 }
4450 diagnostic_endpoints
4451 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
4452 *diagnostics = diagnostic_endpoints.into_iter().peekable();
4453 self.hint_depth = 0;
4454 self.error_depth = 0;
4455 self.warning_depth = 0;
4456 self.information_depth = 0;
4457 }
4458 }
4459 }
4460
4461 /// The current byte offset in the buffer.
4462 pub fn offset(&self) -> usize {
4463 self.range.start
4464 }
4465
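    /// The byte range remaining to be yielded by this iterator.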
4466 pub fn range(&self) -> Range<usize> {
4467 self.range.clone()
4468 }
4469
4470 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
4471 let depth = match endpoint.severity {
4472 DiagnosticSeverity::ERROR => &mut self.error_depth,
4473 DiagnosticSeverity::WARNING => &mut self.warning_depth,
4474 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
4475 DiagnosticSeverity::HINT => &mut self.hint_depth,
4476 _ => return,
4477 };
4478 if endpoint.is_start {
4479 *depth += 1;
4480 } else {
4481 *depth -= 1;
4482 }
4483
4484 if endpoint.is_unnecessary {
4485 if endpoint.is_start {
4486 self.unnecessary_depth += 1;
4487 } else {
4488 self.unnecessary_depth -= 1;
4489 }
4490 }
4491 }
4492
4493 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
4494 if self.error_depth > 0 {
4495 Some(DiagnosticSeverity::ERROR)
4496 } else if self.warning_depth > 0 {
4497 Some(DiagnosticSeverity::WARNING)
4498 } else if self.information_depth > 0 {
4499 Some(DiagnosticSeverity::INFORMATION)
4500 } else if self.hint_depth > 0 {
4501 Some(DiagnosticSeverity::HINT)
4502 } else {
4503 None
4504 }
4505 }
4506
4507 fn current_code_is_unnecessary(&self) -> bool {
4508 self.unnecessary_depth > 0
4509 }
4510}
4511
4512impl<'a> Iterator for BufferChunks<'a> {
4513 type Item = Chunk<'a>;
4514
4515 fn next(&mut self) -> Option<Self::Item> {
4516 let mut next_capture_start = usize::MAX;
4517 let mut next_diagnostic_endpoint = usize::MAX;
4518
4519 if let Some(highlights) = self.highlights.as_mut() {
4520 while let Some((parent_capture_end, _)) = highlights.stack.last() {
4521 if *parent_capture_end <= self.range.start {
4522 highlights.stack.pop();
4523 } else {
4524 break;
4525 }
4526 }
4527
4528 if highlights.next_capture.is_none() {
4529 highlights.next_capture = highlights.captures.next();
4530 }
4531
4532 while let Some(capture) = highlights.next_capture.as_ref() {
4533 if self.range.start < capture.node.start_byte() {
4534 next_capture_start = capture.node.start_byte();
4535 break;
4536 } else {
4537 let highlight_id =
4538 highlights.highlight_maps[capture.grammar_index].get(capture.index);
4539 highlights
4540 .stack
4541 .push((capture.node.end_byte(), highlight_id));
4542 highlights.next_capture = highlights.captures.next();
4543 }
4544 }
4545 }
4546
4547 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
4548 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
4549 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
4550 if endpoint.offset <= self.range.start {
4551 self.update_diagnostic_depths(endpoint);
4552 diagnostic_endpoints.next();
4553 } else {
4554 next_diagnostic_endpoint = endpoint.offset;
4555 break;
4556 }
4557 }
4558 }
4559 self.diagnostic_endpoints = diagnostic_endpoints;
4560
4561 if let Some(chunk) = self.chunks.peek() {
4562 let chunk_start = self.range.start;
4563 let mut chunk_end = (self.chunks.offset() + chunk.len())
4564 .min(next_capture_start)
4565 .min(next_diagnostic_endpoint);
4566 let mut highlight_id = None;
4567 if let Some(highlights) = self.highlights.as_ref() {
4568 if let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last() {
4569 chunk_end = chunk_end.min(*parent_capture_end);
4570 highlight_id = Some(*parent_highlight_id);
4571 }
4572 }
4573
4574 let slice =
4575 &chunk[chunk_start - self.chunks.offset()..chunk_end - self.chunks.offset()];
4576 self.range.start = chunk_end;
4577 if self.range.start == self.chunks.offset() + chunk.len() {
4578 self.chunks.next().unwrap();
4579 }
4580
4581 Some(Chunk {
4582 text: slice,
4583 syntax_highlight_id: highlight_id,
4584 diagnostic_severity: self.current_diagnostic_severity(),
4585 is_unnecessary: self.current_code_is_unnecessary(),
4586 ..Default::default()
4587 })
4588 } else {
4589 None
4590 }
4591 }
4592}
4593
4594impl operation_queue::Operation for Operation {
4595 fn lamport_timestamp(&self) -> clock::Lamport {
4596 match self {
4597 Operation::Buffer(_) => {
4598 unreachable!("buffer operations should never be deferred at this layer")
4599 }
4600 Operation::UpdateDiagnostics {
4601 lamport_timestamp, ..
4602 }
4603 | Operation::UpdateSelections {
4604 lamport_timestamp, ..
4605 }
4606 | Operation::UpdateCompletionTriggers {
4607 lamport_timestamp, ..
4608 } => *lamport_timestamp,
4609 }
4610 }
4611}
4612
4613impl Default for Diagnostic {
4614 fn default() -> Self {
4615 Self {
4616 source: Default::default(),
4617 code: None,
4618 code_description: None,
4619 severity: DiagnosticSeverity::ERROR,
4620 message: Default::default(),
4621 markdown: None,
4622 group_id: 0,
4623 is_primary: false,
4624 is_disk_based: false,
4625 is_unnecessary: false,
4626 data: None,
4627 }
4628 }
4629}
4630
4631impl IndentSize {
4632    /// Returns an [`IndentSize`] representing the given number of spaces.
4633 pub fn spaces(len: u32) -> Self {
4634 Self {
4635 len,
4636 kind: IndentKind::Space,
4637 }
4638 }
4639
4640 /// Returns an [`IndentSize`] representing a tab.
4641 pub fn tab() -> Self {
4642 Self {
4643 len: 1,
4644 kind: IndentKind::Tab,
4645 }
4646 }
4647
4648 /// An iterator over the characters represented by this [`IndentSize`].
4649 pub fn chars(&self) -> impl Iterator<Item = char> {
4650 iter::repeat(self.char()).take(self.len as usize)
4651 }
4652
4653 /// The character representation of this [`IndentSize`].
4654 pub fn char(&self) -> char {
4655 match self.kind {
4656 IndentKind::Space => ' ',
4657 IndentKind::Tab => '\t',
4658 }
4659 }
4660
4661 /// Consumes the current [`IndentSize`] and returns a new one that has
4662 /// been shrunk or enlarged by the given size along the given direction.
4663 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
4664 match direction {
4665 Ordering::Less => {
4666 if self.kind == size.kind && self.len >= size.len {
4667 self.len -= size.len;
4668 }
4669 }
4670 Ordering::Equal => {}
4671 Ordering::Greater => {
4672 if self.len == 0 {
4673 self = size;
4674 } else if self.kind == size.kind {
4675 self.len += size.len;
4676 }
4677 }
4678 }
4679 self
4680 }
4681
4682 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
4683 match self.kind {
4684 IndentKind::Space => self.len as usize,
4685 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
4686 }
4687 }
4688}
4689
4690#[cfg(any(test, feature = "test-support"))]
4691pub struct TestFile {
4692 pub path: Arc<Path>,
4693 pub root_name: String,
4694 pub local_root: Option<PathBuf>,
4695}
4696
4697#[cfg(any(test, feature = "test-support"))]
4698impl File for TestFile {
4699 fn path(&self) -> &Arc<Path> {
4700 &self.path
4701 }
4702
4703 fn full_path(&self, _: &gpui::App) -> PathBuf {
4704 PathBuf::from(&self.root_name).join(self.path.as_ref())
4705 }
4706
4707 fn as_local(&self) -> Option<&dyn LocalFile> {
4708 if self.local_root.is_some() {
4709 Some(self)
4710 } else {
4711 None
4712 }
4713 }
4714
4715 fn disk_state(&self) -> DiskState {
4716 unimplemented!()
4717 }
4718
4719 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a std::ffi::OsStr {
4720 self.path().file_name().unwrap_or(self.root_name.as_ref())
4721 }
4722
4723 fn worktree_id(&self, _: &App) -> WorktreeId {
4724 WorktreeId::from_usize(0)
4725 }
4726
4727 fn to_proto(&self, _: &App) -> rpc::proto::File {
4728 unimplemented!()
4729 }
4730
4731 fn is_private(&self) -> bool {
4732 false
4733 }
4734}
4735
4736#[cfg(any(test, feature = "test-support"))]
4737impl LocalFile for TestFile {
4738 fn abs_path(&self, _cx: &App) -> PathBuf {
4739 PathBuf::from(self.local_root.as_ref().unwrap())
4740 .join(&self.root_name)
4741 .join(self.path.as_ref())
4742 }
4743
4744 fn load(&self, _cx: &App) -> Task<Result<String>> {
4745 unimplemented!()
4746 }
4747
4748 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
4749 unimplemented!()
4750 }
4751}
4752
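/// Groups an ascending sequence of values into contiguous ranges, starting a new
/// range whenever a value is not adjacent to the previous one or the current range
/// reaches `max_len`. For example, `[1, 2, 3, 5]` with a `max_len` of 2 yields
/// `1..3`, `3..4`, and `5..6`.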
4753pub(crate) fn contiguous_ranges(
4754 values: impl Iterator<Item = u32>,
4755 max_len: usize,
4756) -> impl Iterator<Item = Range<u32>> {
4757 let mut values = values;
4758 let mut current_range: Option<Range<u32>> = None;
4759 std::iter::from_fn(move || {
4760 loop {
4761 if let Some(value) = values.next() {
4762 if let Some(range) = &mut current_range {
4763 if value == range.end && range.len() < max_len {
4764 range.end += 1;
4765 continue;
4766 }
4767 }
4768
4769 let prev_range = current_range.clone();
4770 current_range = Some(value..(value + 1));
4771 if prev_range.is_some() {
4772 return prev_range;
4773 }
4774 } else {
4775 return current_range.take();
4776 }
4777 }
4778 })
4779}
4780
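/// Classifies characters as word, whitespace, or punctuation characters, taking the
/// word characters configured for the current language scope into account.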
4781#[derive(Default, Debug)]
4782pub struct CharClassifier {
4783 scope: Option<LanguageScope>,
4784 for_completion: bool,
4785 ignore_punctuation: bool,
4786}
4787
4788impl CharClassifier {
4789 pub fn new(scope: Option<LanguageScope>) -> Self {
4790 Self {
4791 scope,
4792 for_completion: false,
4793 ignore_punctuation: false,
4794 }
4795 }
4796
4797 pub fn for_completion(self, for_completion: bool) -> Self {
4798 Self {
4799 for_completion,
4800 ..self
4801 }
4802 }
4803
4804 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
4805 Self {
4806 ignore_punctuation,
4807 ..self
4808 }
4809 }
4810
4811 pub fn is_whitespace(&self, c: char) -> bool {
4812 self.kind(c) == CharKind::Whitespace
4813 }
4814
4815 pub fn is_word(&self, c: char) -> bool {
4816 self.kind(c) == CharKind::Word
4817 }
4818
4819 pub fn is_punctuation(&self, c: char) -> bool {
4820 self.kind(c) == CharKind::Punctuation
4821 }
4822
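    /// Classifies the given character, treating punctuation as part of a word when
    /// `ignore_punctuation` is true.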
4823 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
4824 if c.is_alphanumeric() || c == '_' {
4825 return CharKind::Word;
4826 }
4827
4828 if let Some(scope) = &self.scope {
4829 let characters = if self.for_completion {
4830 scope.completion_query_characters()
4831 } else {
4832 scope.word_characters()
4833 };
4834 if let Some(characters) = characters {
4835 if characters.contains(&c) {
4836 return CharKind::Word;
4837 }
4838 }
4839 }
4840
4841 if c.is_whitespace() {
4842 return CharKind::Whitespace;
4843 }
4844
4845 if ignore_punctuation {
4846 CharKind::Word
4847 } else {
4848 CharKind::Punctuation
4849 }
4850 }
4851
4852 pub fn kind(&self, c: char) -> CharKind {
4853 self.kind_with(c, self.ignore_punctuation)
4854 }
4855}
4856
4857/// Find all of the ranges of whitespace that occur at the ends of lines
4858/// in the given rope.
4859///
4860/// This could also be done with a regex search, but this implementation
4861/// avoids copying text.
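///
/// For example, for the text `"a  \nb\t\n"`, this returns the range of the two
/// spaces following `a` and the range of the tab following `b`.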
4862pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
4863 let mut ranges = Vec::new();
4864
4865 let mut offset = 0;
4866 let mut prev_chunk_trailing_whitespace_range = 0..0;
4867 for chunk in rope.chunks() {
4868 let mut prev_line_trailing_whitespace_range = 0..0;
4869 for (i, line) in chunk.split('\n').enumerate() {
4870 let line_end_offset = offset + line.len();
4871 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
4872 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
4873
4874 if i == 0 && trimmed_line_len == 0 {
4875 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
4876 }
4877 if !prev_line_trailing_whitespace_range.is_empty() {
4878 ranges.push(prev_line_trailing_whitespace_range);
4879 }
4880
4881 offset = line_end_offset + 1;
4882 prev_line_trailing_whitespace_range = trailing_whitespace_range;
4883 }
4884
4885 offset -= 1;
4886 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
4887 }
4888
4889 if !prev_chunk_trailing_whitespace_range.is_empty() {
4890 ranges.push(prev_chunk_trailing_whitespace_range);
4891 }
4892
4893 ranges
4894}