1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
5 TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures, SyntaxMapMatch,
12 SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16};
17pub use crate::{
18 Grammar, Language, LanguageRegistry,
19 diagnostic_set::DiagnosticSet,
20 highlight_map::{HighlightId, HighlightMap},
21 proto,
22};
23use anyhow::{Context as _, Result};
24pub use clock::ReplicaId;
25use clock::{Global, Lamport};
26use collections::{HashMap, HashSet};
27use fs::MTime;
28use futures::channel::oneshot;
29use gpui::{
30 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
31 Task, TaskLabel, TextStyle,
32};
33
34use lsp::{LanguageServerId, NumberOrString};
35use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
36use serde::{Deserialize, Serialize};
37use serde_json::Value;
38use settings::WorktreeId;
39use smallvec::SmallVec;
40use smol::future::yield_now;
41use std::{
42 any::Any,
43 borrow::Cow,
44 cell::Cell,
45 cmp::{self, Ordering, Reverse},
46 collections::{BTreeMap, BTreeSet},
47 future::Future,
48 iter::{self, Iterator, Peekable},
49 mem,
50 num::NonZeroU32,
51 ops::{Deref, Not, Range},
52 path::PathBuf,
53 rc,
54 sync::{Arc, LazyLock},
55 time::{Duration, Instant},
56 vec,
57};
58use sum_tree::TreeMap;
59use text::operation_queue::OperationQueue;
60use text::*;
61pub use text::{
62 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
63 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
64 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
65 ToPointUtf16, Transaction, TransactionId, Unclipped,
66};
67use theme::{ActiveTheme as _, SyntaxTheme};
68#[cfg(any(test, feature = "test-support"))]
69use util::RandomCharIter;
70use util::{RangeExt, debug_panic, maybe, paths::PathStyle, post_inc, rel_path::RelPath};
71
72#[cfg(any(test, feature = "test-support"))]
73pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
74
75pub use lsp::DiagnosticSeverity;
76
77/// A label for the background task spawned by the buffer to compute
78/// a diff against the contents of its file.
79pub static BUFFER_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
80
81 /// Indicates whether a [`Buffer`] has permission to edit.
82#[derive(PartialEq, Clone, Copy, Debug)]
83pub enum Capability {
84 /// The buffer is a mutable replica.
85 ReadWrite,
86 /// The buffer is a read-only replica.
87 ReadOnly,
88}
89
90pub type BufferRow = u32;
91
92/// An in-memory representation of a source code file, including its text,
93/// syntax trees, git status, and diagnostics.
94pub struct Buffer {
95 text: TextBuffer,
96 branch_state: Option<BufferBranchState>,
97 /// Filesystem state, `None` when there is no path.
98 file: Option<Arc<dyn File>>,
99 /// The mtime of the file when this buffer was last loaded from
100 /// or saved to disk.
101 saved_mtime: Option<MTime>,
102 /// The version vector when this buffer was last loaded from
103 /// or saved to disk.
104 saved_version: clock::Global,
105 preview_version: clock::Global,
106 transaction_depth: usize,
107 was_dirty_before_starting_transaction: Option<bool>,
108 reload_task: Option<Task<Result<()>>>,
109 language: Option<Arc<Language>>,
110 autoindent_requests: Vec<Arc<AutoindentRequest>>,
111 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
112 pending_autoindent: Option<Task<()>>,
113 sync_parse_timeout: Duration,
114 syntax_map: Mutex<SyntaxMap>,
115 reparse: Option<Task<()>>,
116 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
117 non_text_state_update_count: usize,
118 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
119 remote_selections: TreeMap<ReplicaId, SelectionSet>,
120 diagnostics_timestamp: clock::Lamport,
121 completion_triggers: BTreeSet<String>,
122 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
123 completion_triggers_timestamp: clock::Lamport,
124 deferred_ops: OperationQueue<Operation>,
125 capability: Capability,
126 has_conflict: bool,
127 /// Memoized result of calling `has_changes_since(saved_version)`.
128 /// The cell contains `(self.version, has_changes)` as of the most recent call.
129 has_unsaved_edits: Cell<(clock::Global, bool)>,
130 change_bits: Vec<rc::Weak<Cell<bool>>>,
131 _subscriptions: Vec<gpui::Subscription>,
132 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
133}
134
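/// A per-buffer cache of data derived from the syntax tree, bucketed into row
/// chunks so it can be recomputed incrementally. It currently caches bracket
/// matches per chunk; the cache is cleared whenever the buffer is reparsed.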
135#[derive(Debug, Clone)]
136pub struct TreeSitterData {
137 chunks: RowChunks,
138 brackets_by_chunks: Vec<Option<Vec<BracketMatch>>>,
139}
140
141const MAX_ROWS_IN_A_CHUNK: u32 = 50;
142
143impl TreeSitterData {
144 fn clear(&mut self) {
145 self.brackets_by_chunks = vec![None; self.chunks.len()];
146 }
147
148 fn new(snapshot: text::BufferSnapshot) -> Self {
149 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
150 Self {
151 brackets_by_chunks: vec![None; chunks.len()],
152 chunks,
153 }
154 }
155}
156
157#[derive(Copy, Clone, Debug, PartialEq, Eq)]
158pub enum ParseStatus {
159 Idle,
160 Parsing,
161}
162
163struct BufferBranchState {
164 base_buffer: Entity<Buffer>,
165 merged_operations: Vec<Lamport>,
166}
167
168/// An immutable, cheaply cloneable representation of a fixed
169/// state of a buffer.
170pub struct BufferSnapshot {
171 pub text: text::BufferSnapshot,
172 pub syntax: SyntaxSnapshot,
173 file: Option<Arc<dyn File>>,
174 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
175 remote_selections: TreeMap<ReplicaId, SelectionSet>,
176 language: Option<Arc<Language>>,
177 non_text_state_update_count: usize,
178 tree_sitter_data: Arc<Mutex<TreeSitterData>>,
179}
180
181/// The kind and amount of indentation in a particular line. For now,
182/// assumes that indentation is all the same character.
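/// # Example
///
/// A minimal sketch (not taken from existing call sites) of what the two
/// fields encode:
///
/// ```ignore
/// use language::{IndentKind, IndentSize};
///
/// // Four spaces of leading whitespace.
/// let spaces = IndentSize { len: 4, kind: IndentKind::Space };
/// // A single tab character.
/// let tab = IndentSize { len: 1, kind: IndentKind::Tab };
/// assert_ne!(spaces.kind, tab.kind);
/// ```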
183#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
184pub struct IndentSize {
185 /// The number of bytes that comprise the indentation.
186 pub len: u32,
187 /// The kind of whitespace used for indentation.
188 pub kind: IndentKind,
189}
190
191/// A whitespace character that's used for indentation.
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
193pub enum IndentKind {
194 /// An ASCII space character.
195 #[default]
196 Space,
197 /// An ASCII tab character.
198 Tab,
199}
200
201/// The shape of a selection cursor.
202#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
203pub enum CursorShape {
204 /// A vertical bar
205 #[default]
206 Bar,
207 /// A block that surrounds the following character
208 Block,
209 /// An underline that runs along the following character
210 Underline,
211 /// A box drawn around the following character
212 Hollow,
213}
214
215impl From<settings::CursorShape> for CursorShape {
216 fn from(shape: settings::CursorShape) -> Self {
217 match shape {
218 settings::CursorShape::Bar => CursorShape::Bar,
219 settings::CursorShape::Block => CursorShape::Block,
220 settings::CursorShape::Underline => CursorShape::Underline,
221 settings::CursorShape::Hollow => CursorShape::Hollow,
222 }
223 }
224}
225
226#[derive(Clone, Debug)]
227struct SelectionSet {
228 line_mode: bool,
229 cursor_shape: CursorShape,
230 selections: Arc<[Selection<Anchor>]>,
231 lamport_timestamp: clock::Lamport,
232}
233
234/// A diagnostic associated with a certain range of a buffer.
235#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
236pub struct Diagnostic {
237 /// The name of the service that produced this diagnostic.
238 pub source: Option<String>,
239 /// A machine-readable code that identifies this diagnostic.
240 pub code: Option<NumberOrString>,
241 pub code_description: Option<lsp::Uri>,
242 /// Whether this diagnostic is a hint, warning, or error.
243 pub severity: DiagnosticSeverity,
244 /// The human-readable message associated with this diagnostic.
245 pub message: String,
246 /// The human-readable message, in Markdown format, if available.
247 pub markdown: Option<String>,
248 /// An id that identifies the group to which this diagnostic belongs.
249 ///
250 /// When a language server produces a diagnostic with
251 /// one or more associated diagnostics, those diagnostics are all
252 /// assigned a single group ID.
253 pub group_id: usize,
254 /// Whether this diagnostic is the primary diagnostic for its group.
255 ///
256 /// In a given group, the primary diagnostic is the top-level diagnostic
257 /// returned by the language server. The non-primary diagnostics are the
258 /// associated diagnostics.
259 pub is_primary: bool,
260 /// Whether this diagnostic is considered to originate from an analysis of
261 /// files on disk, as opposed to any unsaved buffer contents. This is a
262 /// property of a given diagnostic source, and is configured for a given
263 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
264 /// for the language server.
265 pub is_disk_based: bool,
266 /// Whether this diagnostic marks unnecessary code.
267 pub is_unnecessary: bool,
268 /// Allows quickly separating groups of diagnostics by the kind of source that produced them.
269 pub source_kind: DiagnosticSourceKind,
270 /// Data from the language server that produced this diagnostic, passed back to the server when code actions are requested for this diagnostic.
271 pub data: Option<Value>,
272 /// Whether to underline the corresponding text range in the editor.
273 pub underline: bool,
274}
275
276#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
277pub enum DiagnosticSourceKind {
278 Pulled,
279 Pushed,
280 Other,
281}
282
283/// An operation used to synchronize this buffer with its other replicas.
284#[derive(Clone, Debug, PartialEq)]
285pub enum Operation {
286 /// A text operation.
287 Buffer(text::Operation),
288
289 /// An update to the buffer's diagnostics.
290 UpdateDiagnostics {
291 /// The id of the language server that produced the new diagnostics.
292 server_id: LanguageServerId,
293 /// The diagnostics.
294 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
295 /// The buffer's lamport timestamp.
296 lamport_timestamp: clock::Lamport,
297 },
298
299 /// An update to the most recent selections in this buffer.
300 UpdateSelections {
301 /// The selections.
302 selections: Arc<[Selection<Anchor>]>,
303 /// The buffer's lamport timestamp.
304 lamport_timestamp: clock::Lamport,
305 /// Whether the selections are in 'line mode'.
306 line_mode: bool,
307 /// The [`CursorShape`] associated with these selections.
308 cursor_shape: CursorShape,
309 },
310
311 /// An update to the characters that should trigger autocompletion
312 /// for this buffer.
313 UpdateCompletionTriggers {
314 /// The characters that trigger autocompletion.
315 triggers: Vec<String>,
316 /// The buffer's lamport timestamp.
317 lamport_timestamp: clock::Lamport,
318 /// The language server ID.
319 server_id: LanguageServerId,
320 },
321
322 /// An update to the line ending type of this buffer.
323 UpdateLineEnding {
324 /// The line ending type.
325 line_ending: LineEnding,
326 /// The buffer's lamport timestamp.
327 lamport_timestamp: clock::Lamport,
328 },
329}
330
331/// An event that occurs in a buffer.
332#[derive(Clone, Debug, PartialEq)]
333pub enum BufferEvent {
334 /// The buffer was changed in a way that must be
335 /// propagated to its other replicas.
336 Operation {
337 operation: Operation,
338 is_local: bool,
339 },
340 /// The buffer was edited.
341 Edited,
342 /// The buffer's `dirty` bit changed.
343 DirtyChanged,
344 /// The buffer was saved.
345 Saved,
346 /// The buffer's file was changed on disk.
347 FileHandleChanged,
348 /// The buffer was reloaded.
349 Reloaded,
350 /// The buffer needs to be reloaded.
351 ReloadNeeded,
352 /// The buffer's language was changed.
353 LanguageChanged,
354 /// The buffer's syntax trees were updated.
355 Reparsed,
356 /// The buffer's diagnostics were updated.
357 DiagnosticsUpdated,
358 /// The buffer gained or lost editing capabilities.
359 CapabilityChanged,
360}
361
362/// The file associated with a buffer.
363pub trait File: Send + Sync + Any {
364 /// Returns the [`LocalFile`] associated with this file, if the
365 /// file is local.
366 fn as_local(&self) -> Option<&dyn LocalFile>;
367
368 /// Returns whether this file is local.
369 fn is_local(&self) -> bool {
370 self.as_local().is_some()
371 }
372
373 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
374 /// only available in some states, such as modification time.
375 fn disk_state(&self) -> DiskState;
376
377 /// Returns the path of this file relative to the worktree's root directory.
378 fn path(&self) -> &Arc<RelPath>;
379
380 /// Returns the path of this file relative to the worktree's parent directory (this means it
381 /// includes the name of the worktree's root folder).
382 fn full_path(&self, cx: &App) -> PathBuf;
383
384 /// Returns the path style of this file.
385 fn path_style(&self, cx: &App) -> PathStyle;
386
387 /// Returns the last component of this handle's absolute path. If this handle refers to the root
388 /// of its worktree, then this method will return the name of the worktree itself.
389 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
390
391 /// Returns the id of the worktree to which this file belongs.
392 ///
393 /// This is needed for looking up project-specific settings.
394 fn worktree_id(&self, cx: &App) -> WorktreeId;
395
396 /// Converts this file into a protobuf message.
397 fn to_proto(&self, cx: &App) -> rpc::proto::File;
398
399 /// Returns whether Zed considers this to be a private file.
400 fn is_private(&self) -> bool;
401}
402
403/// The file's storage status - whether it's stored (`Present`), and if so when it was last
404/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
405/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
406/// indicator for new files.
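/// # Example
///
/// A hedged sketch (not from existing call sites) of how the helper methods
/// below relate to these states:
///
/// ```ignore
/// use language::DiskState;
///
/// fn describe(state: DiskState) -> &'static str {
///     match (state.exists(), state.mtime()) {
///         (true, Some(_)) => "stored on disk",
///         (false, None) => "new or deleted",
///         _ => unreachable!("only present files report an mtime"),
///     }
/// }
/// ```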
407#[derive(Copy, Clone, Debug, PartialEq)]
408pub enum DiskState {
409 /// File created in Zed that has not been saved.
410 New,
411 /// File present on the filesystem.
412 Present { mtime: MTime },
413 /// Deleted file that was previously present.
414 Deleted,
415}
416
417impl DiskState {
418 /// Returns the file's last known modification time on disk.
419 pub fn mtime(self) -> Option<MTime> {
420 match self {
421 DiskState::New => None,
422 DiskState::Present { mtime } => Some(mtime),
423 DiskState::Deleted => None,
424 }
425 }
426
427 pub fn exists(&self) -> bool {
428 match self {
429 DiskState::New => false,
430 DiskState::Present { .. } => true,
431 DiskState::Deleted => false,
432 }
433 }
434}
435
436/// The file associated with a buffer, in the case where the file is on the local disk.
437pub trait LocalFile: File {
438 /// Returns the absolute path of this file.
439 fn abs_path(&self, cx: &App) -> PathBuf;
440
441 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
442 fn load(&self, cx: &App) -> Task<Result<String>>;
443
444 /// Loads the file's contents from disk.
445 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
446}
447
448/// The auto-indent behavior associated with an editing operation.
449/// For some editing operations, each affected line of text has its
450/// indentation recomputed. For other operations, the entire block
451/// of edited text is adjusted uniformly.
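///
/// # Example
///
/// A minimal sketch (not from existing call sites): pasting a block that was
/// originally indented to column 4. If the first pasted line is auto-indented
/// to column 8, every other line of the block is shifted right by the same
/// 4 columns.
///
/// ```ignore
/// use language::AutoindentMode;
///
/// let mode = AutoindentMode::Block {
///     original_indent_columns: vec![Some(4)],
/// };
/// ```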
452#[derive(Clone, Debug)]
453pub enum AutoindentMode {
454 /// Indent each line of inserted text.
455 EachLine,
456 /// Apply the same indentation adjustment to all of the lines
457 /// in a given insertion.
458 Block {
459 /// The original indentation column of the first line of each
460 /// insertion, if it has been copied.
461 ///
462 /// Knowing this makes it possible to preserve the relative indentation
463 /// of every line in the insertion from when it was copied.
464 ///
465 /// If the original indent column is `a`, and the first line of the insertion
466 /// is auto-indented to column `b`, then every other line of the insertion
467 /// has its indentation adjusted by `b - a` columns.
468 original_indent_columns: Vec<Option<u32>>,
469 },
470}
471
472#[derive(Clone)]
473struct AutoindentRequest {
474 before_edit: BufferSnapshot,
475 entries: Vec<AutoindentRequestEntry>,
476 is_block_mode: bool,
477 ignore_empty_lines: bool,
478}
479
480#[derive(Debug, Clone)]
481struct AutoindentRequestEntry {
482 /// A range of the buffer whose indentation should be adjusted.
483 range: Range<Anchor>,
484 /// Whether or not these lines should be considered brand new, for the
485 /// purpose of auto-indent. When text is not new, its indentation will
486 /// only be adjusted if the suggested indentation level has *changed*
487 /// since the edit was made.
488 first_line_is_new: bool,
489 indent_size: IndentSize,
490 original_indent_column: Option<u32>,
491}
492
493#[derive(Debug)]
494struct IndentSuggestion {
495 basis_row: u32,
496 delta: Ordering,
497 within_error: bool,
498}
499
500struct BufferChunkHighlights<'a> {
501 captures: SyntaxMapCaptures<'a>,
502 next_capture: Option<SyntaxMapCapture<'a>>,
503 stack: Vec<(usize, HighlightId)>,
504 highlight_maps: Vec<HighlightMap>,
505}
506
507/// An iterator that yields chunks of a buffer's text, along with their
508/// syntax highlights and diagnostic status.
509pub struct BufferChunks<'a> {
510 buffer_snapshot: Option<&'a BufferSnapshot>,
511 range: Range<usize>,
512 chunks: text::Chunks<'a>,
513 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
514 error_depth: usize,
515 warning_depth: usize,
516 information_depth: usize,
517 hint_depth: usize,
518 unnecessary_depth: usize,
519 underline: bool,
520 highlights: Option<BufferChunkHighlights<'a>>,
521}
522
523/// A chunk of a buffer's text, along with its syntax highlight and
524/// diagnostic status.
525#[derive(Clone, Debug, Default)]
526pub struct Chunk<'a> {
527 /// The text of the chunk.
528 pub text: &'a str,
529 /// The syntax highlighting style of the chunk.
530 pub syntax_highlight_id: Option<HighlightId>,
531 /// The highlight style that has been applied to this chunk in
532 /// the editor.
533 pub highlight_style: Option<HighlightStyle>,
534 /// The severity of diagnostic associated with this chunk, if any.
535 pub diagnostic_severity: Option<DiagnosticSeverity>,
536 /// A bitmap of which characters in this chunk are tabs.
537 pub tabs: u128,
538 /// A bitmap of the indices at which characters start in this chunk.
539 pub chars: u128,
540 /// Whether this chunk of text is marked as unnecessary.
541 pub is_unnecessary: bool,
542 /// Whether this chunk of text was originally a tab character.
543 pub is_tab: bool,
544 /// Whether this chunk of text was originally an inlay.
545 pub is_inlay: bool,
546 /// Whether to underline the corresponding text range in the editor.
547 pub underline: bool,
548}
549
550/// A set of edits to a given version of a buffer, computed asynchronously.
551#[derive(Debug)]
552pub struct Diff {
553 pub base_version: clock::Global,
554 pub line_ending: LineEnding,
555 pub edits: Vec<(Range<usize>, Arc<str>)>,
556}
557
558#[derive(Debug, Clone, Copy)]
559pub(crate) struct DiagnosticEndpoint {
560 offset: usize,
561 is_start: bool,
562 underline: bool,
563 severity: DiagnosticSeverity,
564 is_unnecessary: bool,
565}
566
567/// A class of characters, used for characterizing a run of text.
568#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
569pub enum CharKind {
570 /// Whitespace.
571 Whitespace,
572 /// Punctuation.
573 Punctuation,
574 /// Word.
575 Word,
576}
577
578/// Context for character classification within a specific scope.
579#[derive(Copy, Clone, Eq, PartialEq, Debug)]
580pub enum CharScopeContext {
581 /// Character classification for completion queries.
582 ///
583 /// This context treats certain characters as word constituents that would
584 /// normally be considered punctuation, such as '-' in Tailwind classes
585 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
586 Completion,
587 /// Character classification for linked edits.
588 ///
589 /// This context handles characters that should be treated as part of
590 /// identifiers during linked editing operations, such as '.' in JSX
591 /// component names like `<Animated.View>`.
592 LinkedEdit,
593}
594
595 /// A runnable is a set of data about a buffer region that can be resolved into a task.
596pub struct Runnable {
597 pub tags: SmallVec<[RunnableTag; 1]>,
598 pub language: Arc<Language>,
599 pub buffer: BufferId,
600}
601
602#[derive(Default, Clone, Debug)]
603pub struct HighlightedText {
604 pub text: SharedString,
605 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
606}
607
608#[derive(Default, Debug)]
609struct HighlightedTextBuilder {
610 pub text: String,
611 highlights: Vec<(Range<usize>, HighlightStyle)>,
612}
613
614impl HighlightedText {
615 pub fn from_buffer_range<T: ToOffset>(
616 range: Range<T>,
617 snapshot: &text::BufferSnapshot,
618 syntax_snapshot: &SyntaxSnapshot,
619 override_style: Option<HighlightStyle>,
620 syntax_theme: &SyntaxTheme,
621 ) -> Self {
622 let mut highlighted_text = HighlightedTextBuilder::default();
623 highlighted_text.add_text_from_buffer_range(
624 range,
625 snapshot,
626 syntax_snapshot,
627 override_style,
628 syntax_theme,
629 );
630 highlighted_text.build()
631 }
632
633 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
634 gpui::StyledText::new(self.text.clone())
635 .with_default_highlights(default_style, self.highlights.iter().cloned())
636 }
637
638 /// Returns the first line with its leading whitespace trimmed (but never past the
639 /// start of the first highlight), along with a boolean indicating whether more lines follow.
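    ///
    /// A hedged sketch of the expected shape (the `highlighted` value below is
    /// hypothetical, not taken from a real call site):
    ///
    /// ```ignore
    /// // Text "  fn main()\n{}" with no highlights: the leading whitespace is
    /// // trimmed, and a second line exists, so `has_more` is true.
    /// let (preview, has_more) = highlighted.first_line_preview();
    /// assert_eq!(preview.text.as_ref(), "fn main()");
    /// assert!(has_more);
    /// ```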
640 pub fn first_line_preview(self) -> (Self, bool) {
641 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
642 let first_line = &self.text[..newline_ix];
643
644 // Trim leading whitespace, unless an edit starts prior to it.
645 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
646 if let Some((first_highlight_range, _)) = self.highlights.first() {
647 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
648 }
649
650 let preview_text = &first_line[preview_start_ix..];
651 let preview_highlights = self
652 .highlights
653 .into_iter()
654 .skip_while(|(range, _)| range.end <= preview_start_ix)
655 .take_while(|(range, _)| range.start < newline_ix)
656 .filter_map(|(mut range, highlight)| {
657 range.start = range.start.saturating_sub(preview_start_ix);
658 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
659 if range.is_empty() {
660 None
661 } else {
662 Some((range, highlight))
663 }
664 });
665
666 let preview = Self {
667 text: SharedString::new(preview_text),
668 highlights: preview_highlights.collect(),
669 };
670
671 (preview, self.text.len() > newline_ix)
672 }
673}
674
675impl HighlightedTextBuilder {
676 pub fn build(self) -> HighlightedText {
677 HighlightedText {
678 text: self.text.into(),
679 highlights: self.highlights,
680 }
681 }
682
683 pub fn add_text_from_buffer_range<T: ToOffset>(
684 &mut self,
685 range: Range<T>,
686 snapshot: &text::BufferSnapshot,
687 syntax_snapshot: &SyntaxSnapshot,
688 override_style: Option<HighlightStyle>,
689 syntax_theme: &SyntaxTheme,
690 ) {
691 let range = range.to_offset(snapshot);
692 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
693 let start = self.text.len();
694 self.text.push_str(chunk.text);
695 let end = self.text.len();
696
697 if let Some(highlight_style) = chunk
698 .syntax_highlight_id
699 .and_then(|id| id.style(syntax_theme))
700 {
701 let highlight_style = override_style.map_or(highlight_style, |override_style| {
702 highlight_style.highlight(override_style)
703 });
704 self.highlights.push((start..end, highlight_style));
705 } else if let Some(override_style) = override_style {
706 self.highlights.push((start..end, override_style));
707 }
708 }
709 }
710
711 fn highlighted_chunks<'a>(
712 range: Range<usize>,
713 snapshot: &'a text::BufferSnapshot,
714 syntax_snapshot: &'a SyntaxSnapshot,
715 ) -> BufferChunks<'a> {
716 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
717 grammar
718 .highlights_config
719 .as_ref()
720 .map(|config| &config.query)
721 });
722
723 let highlight_maps = captures
724 .grammars()
725 .iter()
726 .map(|grammar| grammar.highlight_map())
727 .collect();
728
729 BufferChunks::new(
730 snapshot.as_rope(),
731 range,
732 Some((captures, highlight_maps)),
733 false,
734 None,
735 )
736 }
737}
738
739#[derive(Clone)]
740pub struct EditPreview {
741 old_snapshot: text::BufferSnapshot,
742 applied_edits_snapshot: text::BufferSnapshot,
743 syntax_snapshot: SyntaxSnapshot,
744}
745
746impl EditPreview {
747 pub fn highlight_edits(
748 &self,
749 current_snapshot: &BufferSnapshot,
750 edits: &[(Range<Anchor>, impl AsRef<str>)],
751 include_deletions: bool,
752 cx: &App,
753 ) -> HighlightedText {
754 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
755 return HighlightedText::default();
756 };
757
758 let mut highlighted_text = HighlightedTextBuilder::default();
759
760 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
761
762 let insertion_highlight_style = HighlightStyle {
763 background_color: Some(cx.theme().status().created_background),
764 ..Default::default()
765 };
766 let deletion_highlight_style = HighlightStyle {
767 background_color: Some(cx.theme().status().deleted_background),
768 ..Default::default()
769 };
770 let syntax_theme = cx.theme().syntax();
771
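        // For each edit, append up to three segments to the output: the
        // unchanged text since the previous edit, optionally the deleted text
        // (taken from the current snapshot), and the newly inserted text, each
        // with its own highlight style.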
772 for (range, edit_text) in edits {
773 let edit_new_end_in_preview_snapshot = range
774 .end
775 .bias_right(&self.old_snapshot)
776 .to_offset(&self.applied_edits_snapshot);
777 let edit_start_in_preview_snapshot =
778 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
779
780 let unchanged_range_in_preview_snapshot =
781 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
782 if !unchanged_range_in_preview_snapshot.is_empty() {
783 highlighted_text.add_text_from_buffer_range(
784 unchanged_range_in_preview_snapshot,
785 &self.applied_edits_snapshot,
786 &self.syntax_snapshot,
787 None,
788 syntax_theme,
789 );
790 }
791
792 let range_in_current_snapshot = range.to_offset(current_snapshot);
793 if include_deletions && !range_in_current_snapshot.is_empty() {
794 highlighted_text.add_text_from_buffer_range(
795 range_in_current_snapshot,
796 &current_snapshot.text,
797 &current_snapshot.syntax,
798 Some(deletion_highlight_style),
799 syntax_theme,
800 );
801 }
802
803 if !edit_text.as_ref().is_empty() {
804 highlighted_text.add_text_from_buffer_range(
805 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
806 &self.applied_edits_snapshot,
807 &self.syntax_snapshot,
808 Some(insertion_highlight_style),
809 syntax_theme,
810 );
811 }
812
813 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
814 }
815
816 highlighted_text.add_text_from_buffer_range(
817 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
818 &self.applied_edits_snapshot,
819 &self.syntax_snapshot,
820 None,
821 syntax_theme,
822 );
823
824 highlighted_text.build()
825 }
826
827 fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<usize>> {
828 let (first, _) = edits.first()?;
829 let (last, _) = edits.last()?;
830
831 let start = first
832 .start
833 .bias_left(&self.old_snapshot)
834 .to_point(&self.applied_edits_snapshot);
835 let end = last
836 .end
837 .bias_right(&self.old_snapshot)
838 .to_point(&self.applied_edits_snapshot);
839
840 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
841 let range = Point::new(start.row, 0)
842 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
843
844 Some(range.to_offset(&self.applied_edits_snapshot))
845 }
846}
847
848#[derive(Clone, Debug, PartialEq, Eq)]
849pub struct BracketMatch {
850 pub open_range: Range<usize>,
851 pub close_range: Range<usize>,
852 pub newline_only: bool,
853 pub id: Option<usize>,
854}
855
856impl BracketMatch {
857 pub fn bracket_ranges(self) -> (Range<usize>, Range<usize>) {
858 (self.open_range, self.close_range)
859 }
860}
861
862impl Buffer {
863 /// Create a new buffer with the given base text.
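    ///
    /// A typical construction inside an entity context (a sketch, not a
    /// specific call site from the codebase):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// ```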
864 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
865 Self::build(
866 TextBuffer::new(
867 ReplicaId::LOCAL,
868 cx.entity_id().as_non_zero_u64().into(),
869 base_text.into(),
870 ),
871 None,
872 Capability::ReadWrite,
873 )
874 }
875
876 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
877 pub fn local_normalized(
878 base_text_normalized: Rope,
879 line_ending: LineEnding,
880 cx: &Context<Self>,
881 ) -> Self {
882 Self::build(
883 TextBuffer::new_normalized(
884 ReplicaId::LOCAL,
885 cx.entity_id().as_non_zero_u64().into(),
886 line_ending,
887 base_text_normalized,
888 ),
889 None,
890 Capability::ReadWrite,
891 )
892 }
893
894 /// Create a new buffer that is a replica of a remote buffer.
895 pub fn remote(
896 remote_id: BufferId,
897 replica_id: ReplicaId,
898 capability: Capability,
899 base_text: impl Into<String>,
900 ) -> Self {
901 Self::build(
902 TextBuffer::new(replica_id, remote_id, base_text.into()),
903 None,
904 capability,
905 )
906 }
907
908 /// Create a new buffer that is a replica of a remote buffer, populating its
909 /// state from the given protobuf message.
910 pub fn from_proto(
911 replica_id: ReplicaId,
912 capability: Capability,
913 message: proto::BufferState,
914 file: Option<Arc<dyn File>>,
915 ) -> Result<Self> {
916 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
917 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
918 let mut this = Self::build(buffer, file, capability);
919 this.text.set_line_ending(proto::deserialize_line_ending(
920 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
921 ));
922 this.saved_version = proto::deserialize_version(&message.saved_version);
923 this.saved_mtime = message.saved_mtime.map(|time| time.into());
924 Ok(this)
925 }
926
927 /// Serialize the buffer's state to a protobuf message.
928 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
929 proto::BufferState {
930 id: self.remote_id().into(),
931 file: self.file.as_ref().map(|f| f.to_proto(cx)),
932 base_text: self.base_text().to_string(),
933 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
934 saved_version: proto::serialize_version(&self.saved_version),
935 saved_mtime: self.saved_mtime.map(|time| time.into()),
936 }
937 }
938
939 /// Serialize as protobufs all of the changes to the buffer since the given version.
940 pub fn serialize_ops(
941 &self,
942 since: Option<clock::Global>,
943 cx: &App,
944 ) -> Task<Vec<proto::Operation>> {
945 let mut operations = Vec::new();
946 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
947
948 operations.extend(self.remote_selections.iter().map(|(_, set)| {
949 proto::serialize_operation(&Operation::UpdateSelections {
950 selections: set.selections.clone(),
951 lamport_timestamp: set.lamport_timestamp,
952 line_mode: set.line_mode,
953 cursor_shape: set.cursor_shape,
954 })
955 }));
956
957 for (server_id, diagnostics) in &self.diagnostics {
958 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
959 lamport_timestamp: self.diagnostics_timestamp,
960 server_id: *server_id,
961 diagnostics: diagnostics.iter().cloned().collect(),
962 }));
963 }
964
965 for (server_id, completions) in &self.completion_triggers_per_language_server {
966 operations.push(proto::serialize_operation(
967 &Operation::UpdateCompletionTriggers {
968 triggers: completions.iter().cloned().collect(),
969 lamport_timestamp: self.completion_triggers_timestamp,
970 server_id: *server_id,
971 },
972 ));
973 }
974
975 let text_operations = self.text.operations().clone();
976 cx.background_spawn(async move {
977 let since = since.unwrap_or_default();
978 operations.extend(
979 text_operations
980 .iter()
981 .filter(|(_, op)| !since.observed(op.timestamp()))
982 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
983 );
984 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
985 operations
986 })
987 }
988
989 /// Assign a language to the buffer, returning the buffer.
990 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
991 self.set_language(Some(language), cx);
992 self
993 }
994
995 /// Returns the [`Capability`] of this buffer.
996 pub fn capability(&self) -> Capability {
997 self.capability
998 }
999
1000 /// Whether this buffer can only be read.
1001 pub fn read_only(&self) -> bool {
1002 self.capability == Capability::ReadOnly
1003 }
1004
1005 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1006 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1007 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1008 let snapshot = buffer.snapshot();
1009 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1010 let tree_sitter_data = TreeSitterData::new(snapshot);
1011 Self {
1012 saved_mtime,
1013 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1014 saved_version: buffer.version(),
1015 preview_version: buffer.version(),
1016 reload_task: None,
1017 transaction_depth: 0,
1018 was_dirty_before_starting_transaction: None,
1019 has_unsaved_edits: Cell::new((buffer.version(), false)),
1020 text: buffer,
1021 branch_state: None,
1022 file,
1023 capability,
1024 syntax_map,
1025 reparse: None,
1026 non_text_state_update_count: 0,
1027 sync_parse_timeout: Duration::from_millis(1),
1028 parse_status: watch::channel(ParseStatus::Idle),
1029 autoindent_requests: Default::default(),
1030 wait_for_autoindent_txs: Default::default(),
1031 pending_autoindent: Default::default(),
1032 language: None,
1033 remote_selections: Default::default(),
1034 diagnostics: Default::default(),
1035 diagnostics_timestamp: Lamport::MIN,
1036 completion_triggers: Default::default(),
1037 completion_triggers_per_language_server: Default::default(),
1038 completion_triggers_timestamp: Lamport::MIN,
1039 deferred_ops: OperationQueue::new(),
1040 has_conflict: false,
1041 change_bits: Default::default(),
1042 _subscriptions: Vec::new(),
1043 }
1044 }
1045
1046 pub fn build_snapshot(
1047 text: Rope,
1048 language: Option<Arc<Language>>,
1049 language_registry: Option<Arc<LanguageRegistry>>,
1050 cx: &mut App,
1051 ) -> impl Future<Output = BufferSnapshot> + use<> {
1052 let entity_id = cx.reserve_entity::<Self>().entity_id();
1053 let buffer_id = entity_id.as_non_zero_u64().into();
1054 async move {
1055 let text =
1056 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1057 .snapshot();
1058 let mut syntax = SyntaxMap::new(&text).snapshot();
1059 if let Some(language) = language.clone() {
1060 let language_registry = language_registry.clone();
1061 syntax.reparse(&text, language_registry, language);
1062 }
1063 let tree_sitter_data = TreeSitterData::new(text.clone());
1064 BufferSnapshot {
1065 text,
1066 syntax,
1067 file: None,
1068 diagnostics: Default::default(),
1069 remote_selections: Default::default(),
1070 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1071 language,
1072 non_text_state_update_count: 0,
1073 }
1074 }
1075 }
1076
1077 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1078 let entity_id = cx.reserve_entity::<Self>().entity_id();
1079 let buffer_id = entity_id.as_non_zero_u64().into();
1080 let text = TextBuffer::new_normalized(
1081 ReplicaId::LOCAL,
1082 buffer_id,
1083 Default::default(),
1084 Rope::new(),
1085 )
1086 .snapshot();
1087 let syntax = SyntaxMap::new(&text).snapshot();
1088 let tree_sitter_data = TreeSitterData::new(text.clone());
1089 BufferSnapshot {
1090 text,
1091 syntax,
1092 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1093 file: None,
1094 diagnostics: Default::default(),
1095 remote_selections: Default::default(),
1096 language: None,
1097 non_text_state_update_count: 0,
1098 }
1099 }
1100
1101 #[cfg(any(test, feature = "test-support"))]
1102 pub fn build_snapshot_sync(
1103 text: Rope,
1104 language: Option<Arc<Language>>,
1105 language_registry: Option<Arc<LanguageRegistry>>,
1106 cx: &mut App,
1107 ) -> BufferSnapshot {
1108 let entity_id = cx.reserve_entity::<Self>().entity_id();
1109 let buffer_id = entity_id.as_non_zero_u64().into();
1110 let text =
1111 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1112 .snapshot();
1113 let mut syntax = SyntaxMap::new(&text).snapshot();
1114 if let Some(language) = language.clone() {
1115 syntax.reparse(&text, language_registry, language);
1116 }
1117 let tree_sitter_data = TreeSitterData::new(text.clone());
1118 BufferSnapshot {
1119 text,
1120 syntax,
1121 tree_sitter_data: Arc::new(Mutex::new(tree_sitter_data)),
1122 file: None,
1123 diagnostics: Default::default(),
1124 remote_selections: Default::default(),
1125 language,
1126 non_text_state_update_count: 0,
1127 }
1128 }
1129
1130 /// Retrieve a snapshot of the buffer's current state. This is computationally
1131 /// cheap, and allows reading from the buffer on a background thread.
1132 pub fn snapshot(&self) -> BufferSnapshot {
1133 let text = self.text.snapshot();
1134 let mut syntax_map = self.syntax_map.lock();
1135 syntax_map.interpolate(&text);
1136 let syntax = syntax_map.snapshot();
1137
1138 BufferSnapshot {
1139 text,
1140 syntax,
1141 tree_sitter_data: self.tree_sitter_data.clone(),
1142 file: self.file.clone(),
1143 remote_selections: self.remote_selections.clone(),
1144 diagnostics: self.diagnostics.clone(),
1145 language: self.language.clone(),
1146 non_text_state_update_count: self.non_text_state_update_count,
1147 }
1148 }
1149
1150 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1151 let this = cx.entity();
1152 cx.new(|cx| {
1153 let mut branch = Self {
1154 branch_state: Some(BufferBranchState {
1155 base_buffer: this.clone(),
1156 merged_operations: Default::default(),
1157 }),
1158 language: self.language.clone(),
1159 has_conflict: self.has_conflict,
1160 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1161 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1162 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1163 };
1164 if let Some(language_registry) = self.language_registry() {
1165 branch.set_language_registry(language_registry);
1166 }
1167
1168 // Reparse the branch buffer so that we get syntax highlighting immediately.
1169 branch.reparse(cx);
1170
1171 branch
1172 })
1173 }
1174
1175 pub fn preview_edits(
1176 &self,
1177 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1178 cx: &App,
1179 ) -> Task<EditPreview> {
1180 let registry = self.language_registry();
1181 let language = self.language().cloned();
1182 let old_snapshot = self.text.snapshot();
1183 let mut branch_buffer = self.text.branch();
1184 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1185 cx.background_spawn(async move {
1186 if !edits.is_empty() {
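            // Bring the syntax snapshot up to date with the unedited text
            // first, so that interpolating and reparsing after the edits are
            // applied starts from a fully parsed baseline.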
1187 if let Some(language) = language.clone() {
1188 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1189 }
1190
1191 branch_buffer.edit(edits.iter().cloned());
1192 let snapshot = branch_buffer.snapshot();
1193 syntax_snapshot.interpolate(&snapshot);
1194
1195 if let Some(language) = language {
1196 syntax_snapshot.reparse(&snapshot, registry, language);
1197 }
1198 }
1199 EditPreview {
1200 old_snapshot,
1201 applied_edits_snapshot: branch_buffer.snapshot(),
1202 syntax_snapshot,
1203 }
1204 })
1205 }
1206
1207 /// Applies all of the changes in this buffer that intersect any of the
1208 /// given `ranges` to its base buffer.
1209 ///
1210 /// If `ranges` is empty, then all changes will be applied. This buffer must
1211 /// be a branch buffer to call this method.
1212 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1213 let Some(base_buffer) = self.base_buffer() else {
1214 debug_panic!("not a branch buffer");
1215 return;
1216 };
1217
1218 let mut ranges = if ranges.is_empty() {
1219 &[0..usize::MAX]
1220 } else {
1221 ranges.as_slice()
1222 }
1223 .iter()
1224 .peekable();
1225
1226 let mut edits = Vec::new();
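        // Walk the edits made since the base buffer's version, keeping only
        // those that intersect one of the requested ranges. The edits and the
        // given ranges are assumed to be in ascending order, so a single
        // forward scan over the peekable `ranges` iterator suffices.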
1227 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1228 let mut is_included = false;
1229 while let Some(range) = ranges.peek() {
1230 if range.end < edit.new.start {
1231 ranges.next().unwrap();
1232 } else {
1233 if range.start <= edit.new.end {
1234 is_included = true;
1235 }
1236 break;
1237 }
1238 }
1239
1240 if is_included {
1241 edits.push((
1242 edit.old.clone(),
1243 self.text_for_range(edit.new.clone()).collect::<String>(),
1244 ));
1245 }
1246 }
1247
1248 let operation = base_buffer.update(cx, |base_buffer, cx| {
1249 // cx.emit(BufferEvent::DiffBaseChanged);
1250 base_buffer.edit(edits, None, cx)
1251 });
1252
1253 if let Some(operation) = operation
1254 && let Some(BufferBranchState {
1255 merged_operations, ..
1256 }) = &mut self.branch_state
1257 {
1258 merged_operations.push(operation);
1259 }
1260 }
1261
1262 fn on_base_buffer_event(
1263 &mut self,
1264 _: Entity<Buffer>,
1265 event: &BufferEvent,
1266 cx: &mut Context<Self>,
1267 ) {
1268 let BufferEvent::Operation { operation, .. } = event else {
1269 return;
1270 };
1271 let Some(BufferBranchState {
1272 merged_operations, ..
1273 }) = &mut self.branch_state
1274 else {
1275 return;
1276 };
1277
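        // Operations that this branch previously merged into the base buffer
        // eventually echo back as base-buffer events. Apply them so the
        // branch's version vector stays in sync, then immediately undo them so
        // the merged text isn't applied to the branch a second time.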
1278 let mut operation_to_undo = None;
1279 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1280 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1281 {
1282 merged_operations.remove(ix);
1283 operation_to_undo = Some(operation.timestamp);
1284 }
1285
1286 self.apply_ops([operation.clone()], cx);
1287
1288 if let Some(timestamp) = operation_to_undo {
1289 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1290 self.undo_operations(counts, cx);
1291 }
1292 }
1293
1294 #[cfg(test)]
1295 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1296 &self.text
1297 }
1298
1299 /// Retrieve a snapshot of the buffer's raw text, without any
1300 /// language-related state like the syntax tree or diagnostics.
1301 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1302 self.text.snapshot()
1303 }
1304
1305 /// The file associated with the buffer, if any.
1306 pub fn file(&self) -> Option<&Arc<dyn File>> {
1307 self.file.as_ref()
1308 }
1309
1310 /// The version of the buffer that was last saved or reloaded from disk.
1311 pub fn saved_version(&self) -> &clock::Global {
1312 &self.saved_version
1313 }
1314
1315 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1316 pub fn saved_mtime(&self) -> Option<MTime> {
1317 self.saved_mtime
1318 }
1319
1320 /// Assign a language to the buffer.
1321 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1322 self.non_text_state_update_count += 1;
1323 self.syntax_map.lock().clear(&self.text);
1324 self.language = language;
1325 self.was_changed();
1326 self.reparse(cx);
1327 cx.emit(BufferEvent::LanguageChanged);
1328 }
1329
1330 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1331 /// other languages if parts of the buffer are written in different languages.
1332 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1333 self.syntax_map
1334 .lock()
1335 .set_language_registry(language_registry);
1336 }
1337
1338 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1339 self.syntax_map.lock().language_registry()
1340 }
1341
1342 /// Assign the line ending type to the buffer.
1343 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1344 self.text.set_line_ending(line_ending);
1345
1346 let lamport_timestamp = self.text.lamport_clock.tick();
1347 self.send_operation(
1348 Operation::UpdateLineEnding {
1349 line_ending,
1350 lamport_timestamp,
1351 },
1352 true,
1353 cx,
1354 );
1355 }
1356
1357 /// Assign the buffer a new [`Capability`].
1358 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1359 if self.capability != capability {
1360 self.capability = capability;
1361 cx.emit(BufferEvent::CapabilityChanged)
1362 }
1363 }
1364
1365 /// This method is called to signal that the buffer has been saved.
1366 pub fn did_save(
1367 &mut self,
1368 version: clock::Global,
1369 mtime: Option<MTime>,
1370 cx: &mut Context<Self>,
1371 ) {
1372 self.saved_version = version.clone();
1373 self.has_unsaved_edits.set((version, false));
1374 self.has_conflict = false;
1375 self.saved_mtime = mtime;
1376 self.was_changed();
1377 cx.emit(BufferEvent::Saved);
1378 cx.notify();
1379 }
1380
1381 /// Reloads the contents of the buffer from disk.
1382 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1383 let (tx, rx) = futures::channel::oneshot::channel();
1384 let prev_version = self.text.version();
1385 self.reload_task = Some(cx.spawn(async move |this, cx| {
1386 let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
1387 let file = this.file.as_ref()?.as_local()?;
1388
1389 Some((file.disk_state().mtime(), file.load(cx)))
1390 })?
1391 else {
1392 return Ok(());
1393 };
1394
1395 let new_text = new_text.await?;
1396 let diff = this
1397 .update(cx, |this, cx| this.diff(new_text.clone(), cx))?
1398 .await;
1399 this.update(cx, |this, cx| {
1400 if this.version() == diff.base_version {
1401 this.finalize_last_transaction();
1402 this.apply_diff(diff, cx);
1403 tx.send(this.finalize_last_transaction().cloned()).ok();
1404 this.has_conflict = false;
1405 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1406 } else {
1407 if !diff.edits.is_empty()
1408 || this
1409 .edits_since::<usize>(&diff.base_version)
1410 .next()
1411 .is_some()
1412 {
1413 this.has_conflict = true;
1414 }
1415
1416 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1417 }
1418
1419 this.reload_task.take();
1420 })
1421 }));
1422 rx
1423 }
1424
1425 /// This method is called to signal that the buffer has been reloaded.
1426 pub fn did_reload(
1427 &mut self,
1428 version: clock::Global,
1429 line_ending: LineEnding,
1430 mtime: Option<MTime>,
1431 cx: &mut Context<Self>,
1432 ) {
1433 self.saved_version = version;
1434 self.has_unsaved_edits
1435 .set((self.saved_version.clone(), false));
1436 self.text.set_line_ending(line_ending);
1437 self.saved_mtime = mtime;
1438 cx.emit(BufferEvent::Reloaded);
1439 cx.notify();
1440 }
1441
1442 /// Updates the [`File`] backing this buffer. This should be called when
1443 /// the file has changed or has been deleted.
1444 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1445 let was_dirty = self.is_dirty();
1446 let mut file_changed = false;
1447
1448 if let Some(old_file) = self.file.as_ref() {
1449 if new_file.path() != old_file.path() {
1450 file_changed = true;
1451 }
1452
1453 let old_state = old_file.disk_state();
1454 let new_state = new_file.disk_state();
1455 if old_state != new_state {
1456 file_changed = true;
1457 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1458 cx.emit(BufferEvent::ReloadNeeded)
1459 }
1460 }
1461 } else {
1462 file_changed = true;
1463 };
1464
1465 self.file = Some(new_file);
1466 if file_changed {
1467 self.was_changed();
1468 self.non_text_state_update_count += 1;
1469 if was_dirty != self.is_dirty() {
1470 cx.emit(BufferEvent::DirtyChanged);
1471 }
1472 cx.emit(BufferEvent::FileHandleChanged);
1473 cx.notify();
1474 }
1475 }
1476
1477 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1478 Some(self.branch_state.as_ref()?.base_buffer.clone())
1479 }
1480
1481 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1482 pub fn language(&self) -> Option<&Arc<Language>> {
1483 self.language.as_ref()
1484 }
1485
1486 /// Returns the [`Language`] at the given location.
1487 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1488 let offset = position.to_offset(self);
1489 let mut is_first = true;
1490 let start_anchor = self.anchor_before(offset);
1491 let end_anchor = self.anchor_after(offset);
1492 self.syntax_map
1493 .lock()
1494 .layers_for_range(offset..offset, &self.text, false)
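            // Keep the outermost layer unconditionally; deeper layers only
            // count if the position falls within one of their included
            // sub-ranges (when such ranges are present).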
1495 .filter(|layer| {
1496 if is_first {
1497 is_first = false;
1498 return true;
1499 }
1500
1501 layer
1502 .included_sub_ranges
1503 .map(|sub_ranges| {
1504 sub_ranges.iter().any(|sub_range| {
1505 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1506 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1507 !is_before_start && !is_after_end
1508 })
1509 })
1510 .unwrap_or(true)
1511 })
1512 .last()
1513 .map(|info| info.language.clone())
1514 .or_else(|| self.language.clone())
1515 }
1516
1517 /// Returns each [`Language`] for the active syntax layers at the given location.
1518 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1519 let offset = position.to_offset(self);
1520 let mut languages: Vec<Arc<Language>> = self
1521 .syntax_map
1522 .lock()
1523 .layers_for_range(offset..offset, &self.text, false)
1524 .map(|info| info.language.clone())
1525 .collect();
1526
1527 if languages.is_empty()
1528 && let Some(buffer_language) = self.language()
1529 {
1530 languages.push(buffer_language.clone());
1531 }
1532
1533 languages
1534 }
1535
1536 /// An integer version number that accounts for all updates besides
1537 /// the buffer's text itself (which is versioned via a version vector).
1538 pub fn non_text_state_update_count(&self) -> usize {
1539 self.non_text_state_update_count
1540 }
1541
1542 /// Whether the buffer is being parsed in the background.
1543 #[cfg(any(test, feature = "test-support"))]
1544 pub fn is_parsing(&self) -> bool {
1545 self.reparse.is_some()
1546 }
1547
1548 /// Indicates whether the buffer contains any regions that may be
1549 /// written in a language that hasn't been loaded yet.
1550 pub fn contains_unknown_injections(&self) -> bool {
1551 self.syntax_map.lock().contains_unknown_injections()
1552 }
1553
1554 #[cfg(any(test, feature = "test-support"))]
1555 pub fn set_sync_parse_timeout(&mut self, timeout: Duration) {
1556 self.sync_parse_timeout = timeout;
1557 }
1558
1559 /// Called after an edit to synchronize the buffer's main parse tree with
1560 /// the buffer's new underlying state.
1561 ///
1562 /// Locks the syntax map and interpolates the edits since the last reparse
1563 /// into the foreground syntax tree.
1564 ///
1565 /// Then takes a stable snapshot of the syntax map before unlocking it.
1566 /// The snapshot with the interpolated edits is sent to a background thread,
1567 /// where we ask Tree-sitter to perform an incremental parse.
1568 ///
1569 /// Meanwhile, in the foreground, we block the main thread for up to 1ms
1570 /// waiting on the parse to complete. As soon as it completes, we proceed
1571 /// synchronously, unless a 1ms timeout elapses.
1572 ///
1573 /// If we time out waiting on the parse, we spawn a second task that waits
1574 /// until the parse does complete, and we return with the interpolated tree still
1575 /// in the foreground. When the background parse completes, it calls back into
1576 /// the main thread and assigns the finished parse state.
1577 ///
1578 /// If the buffer or grammar changed since the start of the background parse,
1579 /// initiate an additional reparse recursively. To avoid concurrent parses
1580 /// for the same buffer, we only initiate a new parse if we are not already
1581 /// parsing in the background.
1582 pub fn reparse(&mut self, cx: &mut Context<Self>) {
1583 if self.reparse.is_some() {
1584 return;
1585 }
1586 let language = if let Some(language) = self.language.clone() {
1587 language
1588 } else {
1589 return;
1590 };
1591
1592 let text = self.text_snapshot();
1593 let parsed_version = self.version();
1594
1595 let mut syntax_map = self.syntax_map.lock();
1596 syntax_map.interpolate(&text);
1597 let language_registry = syntax_map.language_registry();
1598 let mut syntax_snapshot = syntax_map.snapshot();
1599 drop(syntax_map);
1600
1601 let parse_task = cx.background_spawn({
1602 let language = language.clone();
1603 let language_registry = language_registry.clone();
1604 async move {
1605 syntax_snapshot.reparse(&text, language_registry, language);
1606 syntax_snapshot
1607 }
1608 });
1609
1610 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1611 match cx
1612 .background_executor()
1613 .block_with_timeout(self.sync_parse_timeout, parse_task)
1614 {
1615 Ok(new_syntax_snapshot) => {
1616 self.did_finish_parsing(new_syntax_snapshot, cx);
1617 self.reparse = None;
1618 }
1619 Err(parse_task) => {
1620 // todo(lw): hot foreground spawn
1621 self.reparse = Some(cx.spawn(async move |this, cx| {
1622 let new_syntax_map = cx.background_spawn(parse_task).await;
1623 this.update(cx, move |this, cx| {
1624 let grammar_changed = || {
1625 this.language.as_ref().is_none_or(|current_language| {
1626 !Arc::ptr_eq(&language, current_language)
1627 })
1628 };
1629 let language_registry_changed = || {
1630 new_syntax_map.contains_unknown_injections()
1631 && language_registry.is_some_and(|registry| {
1632 registry.version() != new_syntax_map.language_registry_version()
1633 })
1634 };
1635 let parse_again = this.version.changed_since(&parsed_version)
1636 || language_registry_changed()
1637 || grammar_changed();
1638 this.did_finish_parsing(new_syntax_map, cx);
1639 this.reparse = None;
1640 if parse_again {
1641 this.reparse(cx);
1642 }
1643 })
1644 .ok();
1645 }));
1646 }
1647 }
1648 }
1649
1650 fn did_finish_parsing(&mut self, syntax_snapshot: SyntaxSnapshot, cx: &mut Context<Self>) {
1651 self.was_changed();
1652 self.non_text_state_update_count += 1;
1653 self.syntax_map.lock().did_parse(syntax_snapshot);
1654 self.request_autoindent(cx);
1655 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1656 self.tree_sitter_data.lock().clear();
1657 cx.emit(BufferEvent::Reparsed);
1658 cx.notify();
1659 }
1660
1661 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1662 self.parse_status.1.clone()
1663 }
1664
1665 /// Waits until the buffer is no longer parsing.
1666 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1667 let mut parse_status = self.parse_status();
1668 async move {
1669 while *parse_status.borrow() != ParseStatus::Idle {
1670 if parse_status.changed().await.is_err() {
1671 break;
1672 }
1673 }
1674 }
1675 }
1676
1677 /// Assign to the buffer a set of diagnostics created by a given language server.
1678 pub fn update_diagnostics(
1679 &mut self,
1680 server_id: LanguageServerId,
1681 diagnostics: DiagnosticSet,
1682 cx: &mut Context<Self>,
1683 ) {
1684 let lamport_timestamp = self.text.lamport_clock.tick();
1685 let op = Operation::UpdateDiagnostics {
1686 server_id,
1687 diagnostics: diagnostics.iter().cloned().collect(),
1688 lamport_timestamp,
1689 };
1690
1691 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1692 self.send_operation(op, true, cx);
1693 }
1694
1695 pub fn buffer_diagnostics(
1696 &self,
1697 for_server: Option<LanguageServerId>,
1698 ) -> Vec<&DiagnosticEntry<Anchor>> {
1699 match for_server {
1700 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1701 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1702 Err(_) => Vec::new(),
1703 },
1704 None => self
1705 .diagnostics
1706 .iter()
1707 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1708 .collect(),
1709 }
1710 }
1711
1712 fn request_autoindent(&mut self, cx: &mut Context<Self>) {
1713 if let Some(indent_sizes) = self.compute_autoindents() {
1714 let indent_sizes = cx.background_spawn(indent_sizes);
1715 match cx
1716 .background_executor()
1717 .block_with_timeout(Duration::from_micros(500), indent_sizes)
1718 {
1719 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1720 Err(indent_sizes) => {
1721 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1722 let indent_sizes = indent_sizes.await;
1723 this.update(cx, |this, cx| {
1724 this.apply_autoindents(indent_sizes, cx);
1725 })
1726 .ok();
1727 }));
1728 }
1729 }
1730 } else {
1731 self.autoindent_requests.clear();
1732 for tx in self.wait_for_autoindent_txs.drain(..) {
1733 tx.send(()).ok();
1734 }
1735 }
1736 }
1737
1738 fn compute_autoindents(
1739 &self,
1740 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1741 let max_rows_between_yields = 100;
1742 let snapshot = self.snapshot();
1743 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1744 return None;
1745 }
1746
1747 let autoindent_requests = self.autoindent_requests.clone();
1748 Some(async move {
1749 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1750 for request in autoindent_requests {
1751 // Resolve each edited range to its row in the current buffer and in the
1752 // buffer before this batch of edits.
1753 let mut row_ranges = Vec::new();
1754 let mut old_to_new_rows = BTreeMap::new();
1755 let mut language_indent_sizes_by_new_row = Vec::new();
1756 for entry in &request.entries {
1757 let position = entry.range.start;
1758 let new_row = position.to_point(&snapshot).row;
1759 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1760 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1761
1762 if !entry.first_line_is_new {
1763 let old_row = position.to_point(&request.before_edit).row;
1764 old_to_new_rows.insert(old_row, new_row);
1765 }
1766 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1767 }
1768
1769 // Build a map containing the suggested indentation for each of the edited lines
1770 // with respect to the state of the buffer before these edits. This map is keyed
1771 // by the rows for these lines in the current state of the buffer.
1772 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1773 let old_edited_ranges =
1774 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1775 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1776 let mut language_indent_size = IndentSize::default();
1777 for old_edited_range in old_edited_ranges {
1778 let suggestions = request
1779 .before_edit
1780 .suggest_autoindents(old_edited_range.clone())
1781 .into_iter()
1782 .flatten();
1783 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1784 if let Some(suggestion) = suggestion {
1785 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1786
1787 // Find the indent size based on the language for this row.
1788 while let Some((row, size)) = language_indent_sizes.peek() {
1789 if *row > new_row {
1790 break;
1791 }
1792 language_indent_size = *size;
1793 language_indent_sizes.next();
1794 }
1795
1796 let suggested_indent = old_to_new_rows
1797 .get(&suggestion.basis_row)
1798 .and_then(|from_row| {
1799 Some(old_suggestions.get(from_row).copied()?.0)
1800 })
1801 .unwrap_or_else(|| {
1802 request
1803 .before_edit
1804 .indent_size_for_line(suggestion.basis_row)
1805 })
1806 .with_delta(suggestion.delta, language_indent_size);
1807 old_suggestions
1808 .insert(new_row, (suggested_indent, suggestion.within_error));
1809 }
1810 }
1811 yield_now().await;
1812 }
1813
1814 // Compute new suggestions for each line, but only include them in the result
1815 // if they differ from the old suggestion for that line.
1816 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1817 let mut language_indent_size = IndentSize::default();
1818 for (row_range, original_indent_column) in row_ranges {
1819 let new_edited_row_range = if request.is_block_mode {
1820 row_range.start..row_range.start + 1
1821 } else {
1822 row_range.clone()
1823 };
1824
1825 let suggestions = snapshot
1826 .suggest_autoindents(new_edited_row_range.clone())
1827 .into_iter()
1828 .flatten();
1829 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1830 if let Some(suggestion) = suggestion {
1831 // Find the indent size based on the language for this row.
1832 while let Some((row, size)) = language_indent_sizes.peek() {
1833 if *row > new_row {
1834 break;
1835 }
1836 language_indent_size = *size;
1837 language_indent_sizes.next();
1838 }
1839
1840 let suggested_indent = indent_sizes
1841 .get(&suggestion.basis_row)
1842 .copied()
1843 .map(|e| e.0)
1844 .unwrap_or_else(|| {
1845 snapshot.indent_size_for_line(suggestion.basis_row)
1846 })
1847 .with_delta(suggestion.delta, language_indent_size);
1848
1849 if old_suggestions.get(&new_row).is_none_or(
1850 |(old_indentation, was_within_error)| {
1851 suggested_indent != *old_indentation
1852 && (!suggestion.within_error || *was_within_error)
1853 },
1854 ) {
1855 indent_sizes.insert(
1856 new_row,
1857 (suggested_indent, request.ignore_empty_lines),
1858 );
1859 }
1860 }
1861 }
1862
1863 if let (true, Some(original_indent_column)) =
1864 (request.is_block_mode, original_indent_column)
1865 {
1866 let new_indent =
1867 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1868 *indent
1869 } else {
1870 snapshot.indent_size_for_line(row_range.start)
1871 };
1872 let delta = new_indent.len as i64 - original_indent_column as i64;
1873 if delta != 0 {
1874 for row in row_range.skip(1) {
1875 indent_sizes.entry(row).or_insert_with(|| {
1876 let mut size = snapshot.indent_size_for_line(row);
1877 if size.kind == new_indent.kind {
1878 match delta.cmp(&0) {
1879 Ordering::Greater => size.len += delta as u32,
1880 Ordering::Less => {
1881 size.len = size.len.saturating_sub(-delta as u32)
1882 }
1883 Ordering::Equal => {}
1884 }
1885 }
1886 (size, request.ignore_empty_lines)
1887 });
1888 }
1889 }
1890 }
1891
1892 yield_now().await;
1893 }
1894 }
1895
1896 indent_sizes
1897 .into_iter()
1898 .filter_map(|(row, (indent, ignore_empty_lines))| {
1899 if ignore_empty_lines && snapshot.line_len(row) == 0 {
1900 None
1901 } else {
1902 Some((row, indent))
1903 }
1904 })
1905 .collect()
1906 })
1907 }
1908
1909 fn apply_autoindents(
1910 &mut self,
1911 indent_sizes: BTreeMap<u32, IndentSize>,
1912 cx: &mut Context<Self>,
1913 ) {
1914 self.autoindent_requests.clear();
1915 for tx in self.wait_for_autoindent_txs.drain(..) {
1916 tx.send(()).ok();
1917 }
1918
1919 let edits: Vec<_> = indent_sizes
1920 .into_iter()
1921 .filter_map(|(row, indent_size)| {
1922 let current_size = indent_size_for_line(self, row);
1923 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
1924 })
1925 .collect();
1926
1927 let preserve_preview = self.preserve_preview();
1928 self.edit(edits, None, cx);
1929 if preserve_preview {
1930 self.refresh_preview();
1931 }
1932 }
1933
1934 /// Create a minimal edit that will cause the given row to be indented
1935 /// with the given size. After applying this edit, the length of the line
1936 /// will always be at least `new_size.len`.
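    ///
    /// A small illustrative sketch (import paths are assumptions; only the call itself
    /// comes from this API):
    ///
    /// ```ignore
    /// // Grow a 2-space indent on row 3 to 4 spaces: insert two spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```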
1937 pub fn edit_for_indent_size_adjustment(
1938 row: u32,
1939 current_size: IndentSize,
1940 new_size: IndentSize,
1941 ) -> Option<(Range<Point>, String)> {
1942 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
1944 Ordering::Greater => {
1945 let point = Point::new(row, 0);
1946 Some((
1947 point..point,
1948 iter::repeat(new_size.char())
1949 .take((new_size.len - current_size.len) as usize)
1950 .collect::<String>(),
1951 ))
1952 }
1953
1954 Ordering::Less => Some((
1955 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
1956 String::new(),
1957 )),
1958
1959 Ordering::Equal => None,
1960 }
1961 } else {
1962 Some((
1963 Point::new(row, 0)..Point::new(row, current_size.len),
1964 iter::repeat(new_size.char())
1965 .take(new_size.len as usize)
1966 .collect::<String>(),
1967 ))
1968 }
1969 }
1970
1971 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
1972 /// and the given new text.
1973 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
1974 let old_text = self.as_rope().clone();
1975 let base_version = self.version();
1976 cx.background_executor()
1977 .spawn_labeled(*BUFFER_DIFF_TASK, async move {
1978 let old_text = old_text.to_string();
1979 let line_ending = LineEnding::detect(&new_text);
1980 LineEnding::normalize(&mut new_text);
1981 let edits = text_diff(&old_text, &new_text);
1982 Diff {
1983 base_version,
1984 line_ending,
1985 edits,
1986 }
1987 })
1988 }
1989
1990 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
1992 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
1993 let old_text = self.as_rope().clone();
1994 let line_ending = self.line_ending();
1995 let base_version = self.version();
1996 cx.background_spawn(async move {
1997 let ranges = trailing_whitespace_ranges(&old_text);
1998 let empty = Arc::<str>::from("");
1999 Diff {
2000 base_version,
2001 line_ending,
2002 edits: ranges
2003 .into_iter()
2004 .map(|range| (range, empty.clone()))
2005 .collect(),
2006 }
2007 })
2008 }
2009
2010 /// Ensures that the buffer ends with a single newline character, and
2011 /// no other whitespace. Skips if the buffer is empty.
2012 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2013 let len = self.len();
2014 if len == 0 {
2015 return;
2016 }
2017 let mut offset = len;
2018 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2019 let non_whitespace_len = chunk
2020 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2021 .len();
2022 offset -= chunk.len();
2023 offset += non_whitespace_len;
2024 if non_whitespace_len != 0 {
2025 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2026 return;
2027 }
2028 break;
2029 }
2030 }
2031 self.edit([(offset..len, "\n")], None, cx);
2032 }
2033
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, the diff is adjusted to account for those changes, and any parts of the
    /// diff that conflict with those changes are discarded.
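    ///
    /// A hedged sketch of the round trip with [`Buffer::diff`] (the entity/context
    /// plumbing is illustrative):
    ///
    /// ```ignore
    /// let task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// let diff = task.await; // in an async context
    /// buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx));
    /// ```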
2037 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2038 let snapshot = self.snapshot();
2039 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2040 let mut delta = 0;
2041 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2042 while let Some(edit_since) = edits_since.peek() {
2043 // If the edit occurs after a diff hunk, then it does not
2044 // affect that hunk.
2045 if edit_since.old.start > range.end {
2046 break;
2047 }
2048 // If the edit precedes the diff hunk, then adjust the hunk
2049 // to reflect the edit.
2050 else if edit_since.old.end < range.start {
2051 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2052 edits_since.next();
2053 }
2054 // If the edit intersects a diff hunk, then discard that hunk.
2055 else {
2056 return None;
2057 }
2058 }
2059
2060 let start = (range.start as i64 + delta) as usize;
2061 let end = (range.end as i64 + delta) as usize;
2062 Some((start..end, new_text))
2063 });
2064
2065 self.start_transaction();
2066 self.text.set_line_ending(diff.line_ending);
2067 self.edit(adjusted_edits, None, cx);
2068 self.end_transaction(cx)
2069 }
2070
2071 pub fn has_unsaved_edits(&self) -> bool {
2072 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2073
2074 if last_version == self.version {
2075 self.has_unsaved_edits
2076 .set((last_version, has_unsaved_edits));
2077 return has_unsaved_edits;
2078 }
2079
2080 let has_edits = self.has_edits_since(&self.saved_version);
2081 self.has_unsaved_edits
2082 .set((self.version.clone(), has_edits));
2083 has_edits
2084 }
2085
2086 /// Checks if the buffer has unsaved changes.
2087 pub fn is_dirty(&self) -> bool {
2088 if self.capability == Capability::ReadOnly {
2089 return false;
2090 }
2091 if self.has_conflict {
2092 return true;
2093 }
2094 match self.file.as_ref().map(|f| f.disk_state()) {
2095 Some(DiskState::New) | Some(DiskState::Deleted) => {
2096 !self.is_empty() && self.has_unsaved_edits()
2097 }
2098 _ => self.has_unsaved_edits(),
2099 }
2100 }
2101
2102 /// Checks if the buffer and its file have both changed since the buffer
2103 /// was last saved or reloaded.
2104 pub fn has_conflict(&self) -> bool {
2105 if self.has_conflict {
2106 return true;
2107 }
2108 let Some(file) = self.file.as_ref() else {
2109 return false;
2110 };
2111 match file.disk_state() {
2112 DiskState::New => false,
2113 DiskState::Present { mtime } => match self.saved_mtime {
2114 Some(saved_mtime) => {
2115 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2116 }
2117 None => true,
2118 },
2119 DiskState::Deleted => false,
2120 }
2121 }
2122
2123 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2124 pub fn subscribe(&mut self) -> Subscription {
2125 self.text.subscribe()
2126 }
2127
2128 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2129 ///
2130 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
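    ///
    /// A minimal sketch:
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...any subsequent edit to the buffer sets the bit...
    /// assert!(changed.get());
    /// ```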
2132 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2133 if let Err(ix) = self
2134 .change_bits
2135 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2136 {
2137 self.change_bits.insert(ix, bit);
2138 }
2139 }
2140
2141 /// Set the change bit for all "listeners".
2142 fn was_changed(&mut self) {
2143 self.change_bits.retain(|change_bit| {
2144 change_bit
2145 .upgrade()
2146 .inspect(|bit| {
2147 _ = bit.replace(true);
2148 })
2149 .is_some()
2150 });
2151 }
2152
2153 /// Starts a transaction, if one is not already in-progress. When undoing or
2154 /// redoing edits, all of the edits performed within a transaction are undone
2155 /// or redone together.
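    ///
    /// A sketch of grouping two edits so they undo together:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn main() {\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "}\n")], None, cx);
    /// buffer.end_transaction(cx);
    /// buffer.undo(cx); // reverts both edits at once
    /// ```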
2156 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2157 self.start_transaction_at(Instant::now())
2158 }
2159
2160 /// Starts a transaction, providing the current time. Subsequent transactions
2161 /// that occur within a short period of time will be grouped together. This
2162 /// is controlled by the buffer's undo grouping duration.
2163 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2164 self.transaction_depth += 1;
2165 if self.was_dirty_before_starting_transaction.is_none() {
2166 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2167 }
2168 self.text.start_transaction_at(now)
2169 }
2170
2171 /// Terminates the current transaction, if this is the outermost transaction.
2172 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2173 self.end_transaction_at(Instant::now(), cx)
2174 }
2175
2176 /// Terminates the current transaction, providing the current time. Subsequent transactions
2177 /// that occur within a short period of time will be grouped together. This
2178 /// is controlled by the buffer's undo grouping duration.
2179 pub fn end_transaction_at(
2180 &mut self,
2181 now: Instant,
2182 cx: &mut Context<Self>,
2183 ) -> Option<TransactionId> {
2184 assert!(self.transaction_depth > 0);
2185 self.transaction_depth -= 1;
2186 let was_dirty = if self.transaction_depth == 0 {
2187 self.was_dirty_before_starting_transaction.take().unwrap()
2188 } else {
2189 false
2190 };
2191 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2192 self.did_edit(&start_version, was_dirty, cx);
2193 Some(transaction_id)
2194 } else {
2195 None
2196 }
2197 }
2198
2199 /// Manually add a transaction to the buffer's undo history.
2200 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2201 self.text.push_transaction(transaction, now);
2202 }
2203
2204 /// Differs from `push_transaction` in that it does not clear the redo
2205 /// stack. Intended to be used to create a parent transaction to merge
2206 /// potential child transactions into.
2207 ///
2208 /// The caller is responsible for removing it from the undo history using
2209 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2210 /// are merged into this transaction, the caller is responsible for ensuring
2211 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2212 /// cleared is to create transactions with the usual `start_transaction` and
2213 /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
2215 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2216 self.text.push_empty_transaction(now)
2217 }
2218
2219 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2221 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2222 self.text.finalize_last_transaction()
2223 }
2224
2225 /// Manually group all changes since a given transaction.
2226 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2227 self.text.group_until_transaction(transaction_id);
2228 }
2229
    /// Manually remove a transaction from the buffer's undo history.
2231 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2232 self.text.forget_transaction(transaction_id)
2233 }
2234
    /// Retrieve a transaction from the buffer's undo history.
2236 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2237 self.text.get_transaction(transaction_id)
2238 }
2239
2240 /// Manually merge two transactions in the buffer's undo history.
2241 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2242 self.text.merge_transactions(transaction, destination);
2243 }
2244
2245 /// Waits for the buffer to receive operations with the given timestamps.
2246 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2247 &mut self,
2248 edit_ids: It,
2249 ) -> impl Future<Output = Result<()>> + use<It> {
2250 self.text.wait_for_edits(edit_ids)
2251 }
2252
2253 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2254 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2255 &mut self,
2256 anchors: It,
2257 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2258 self.text.wait_for_anchors(anchors)
2259 }
2260
2261 /// Waits for the buffer to receive operations up to the given version.
2262 pub fn wait_for_version(
2263 &mut self,
2264 version: clock::Global,
2265 ) -> impl Future<Output = Result<()>> + use<> {
2266 self.text.wait_for_version(version)
2267 }
2268
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2271 pub fn give_up_waiting(&mut self) {
2272 self.text.give_up_waiting();
2273 }
2274
2275 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2276 let mut rx = None;
2277 if !self.autoindent_requests.is_empty() {
2278 let channel = oneshot::channel();
2279 self.wait_for_autoindent_txs.push(channel.0);
2280 rx = Some(channel.1);
2281 }
2282 rx
2283 }
2284
2285 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2286 pub fn set_active_selections(
2287 &mut self,
2288 selections: Arc<[Selection<Anchor>]>,
2289 line_mode: bool,
2290 cursor_shape: CursorShape,
2291 cx: &mut Context<Self>,
2292 ) {
2293 let lamport_timestamp = self.text.lamport_clock.tick();
2294 self.remote_selections.insert(
2295 self.text.replica_id(),
2296 SelectionSet {
2297 selections: selections.clone(),
2298 lamport_timestamp,
2299 line_mode,
2300 cursor_shape,
2301 },
2302 );
2303 self.send_operation(
2304 Operation::UpdateSelections {
2305 selections,
2306 line_mode,
2307 lamport_timestamp,
2308 cursor_shape,
2309 },
2310 true,
2311 cx,
2312 );
2313 self.non_text_state_update_count += 1;
2314 cx.notify();
2315 }
2316
2317 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2318 /// this replica.
2319 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2320 if self
2321 .remote_selections
2322 .get(&self.text.replica_id())
2323 .is_none_or(|set| !set.selections.is_empty())
2324 {
2325 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2326 }
2327 }
2328
2329 pub fn set_agent_selections(
2330 &mut self,
2331 selections: Arc<[Selection<Anchor>]>,
2332 line_mode: bool,
2333 cursor_shape: CursorShape,
2334 cx: &mut Context<Self>,
2335 ) {
2336 let lamport_timestamp = self.text.lamport_clock.tick();
2337 self.remote_selections.insert(
2338 ReplicaId::AGENT,
2339 SelectionSet {
2340 selections,
2341 lamport_timestamp,
2342 line_mode,
2343 cursor_shape,
2344 },
2345 );
2346 self.non_text_state_update_count += 1;
2347 cx.notify();
2348 }
2349
2350 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2351 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2352 }
2353
2354 /// Replaces the buffer's entire text.
2355 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2356 where
2357 T: Into<Arc<str>>,
2358 {
2359 self.autoindent_requests.clear();
2360 self.edit([(0..self.len(), text)], None, cx)
2361 }
2362
2363 /// Appends the given text to the end of the buffer.
2364 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2365 where
2366 T: Into<Arc<str>>,
2367 {
2368 self.edit([(self.len()..self.len(), text)], None, cx)
2369 }
2370
2371 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2372 /// delete, and a string of text to insert at that location.
2373 ///
2374 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2375 /// request for the edited ranges, which will be processed when the buffer finishes
2376 /// parsing.
2377 ///
2378 /// Parsing takes place at the end of a transaction, and may compute synchronously
2379 /// or asynchronously, depending on the changes.
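    ///
    /// A brief sketch (plain byte offsets are used here, but any `ToOffset` works):
    ///
    /// ```ignore
    /// // Replace the first three bytes and append a newline, auto-indenting each
    /// // edited line once the buffer has re-parsed.
    /// buffer.edit(
    ///     [(0..3, "let"), (buffer.len()..buffer.len(), "\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```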
2380 pub fn edit<I, S, T>(
2381 &mut self,
2382 edits_iter: I,
2383 autoindent_mode: Option<AutoindentMode>,
2384 cx: &mut Context<Self>,
2385 ) -> Option<clock::Lamport>
2386 where
2387 I: IntoIterator<Item = (Range<S>, T)>,
2388 S: ToOffset,
2389 T: Into<Arc<str>>,
2390 {
2391 // Skip invalid edits and coalesce contiguous ones.
2392 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2393
2394 for (range, new_text) in edits_iter {
2395 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2396
2397 if range.start > range.end {
2398 mem::swap(&mut range.start, &mut range.end);
2399 }
2400 let new_text = new_text.into();
2401 if !new_text.is_empty() || !range.is_empty() {
2402 if let Some((prev_range, prev_text)) = edits.last_mut()
2403 && prev_range.end >= range.start
2404 {
2405 prev_range.end = cmp::max(prev_range.end, range.end);
2406 *prev_text = format!("{prev_text}{new_text}").into();
2407 } else {
2408 edits.push((range, new_text));
2409 }
2410 }
2411 }
2412 if edits.is_empty() {
2413 return None;
2414 }
2415
2416 self.start_transaction();
2417 self.pending_autoindent.take();
2418 let autoindent_request = autoindent_mode
2419 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2420
2421 let edit_operation = self.text.edit(edits.iter().cloned());
2422 let edit_id = edit_operation.timestamp();
2423
2424 if let Some((before_edit, mode)) = autoindent_request {
2425 let mut delta = 0isize;
2426 let mut previous_setting = None;
2427 let entries: Vec<_> = edits
2428 .into_iter()
2429 .enumerate()
2430 .zip(&edit_operation.as_edit().unwrap().new_text)
2431 .filter(|((_, (range, _)), _)| {
2432 let language = before_edit.language_at(range.start);
2433 let language_id = language.map(|l| l.id());
2434 if let Some((cached_language_id, auto_indent)) = previous_setting
2435 && cached_language_id == language_id
2436 {
2437 auto_indent
2438 } else {
2439 // The auto-indent setting is not present in editorconfigs, hence
2440 // we can avoid passing the file here.
2441 let auto_indent =
2442 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2443 previous_setting = Some((language_id, auto_indent));
2444 auto_indent
2445 }
2446 })
2447 .map(|((ix, (range, _)), new_text)| {
2448 let new_text_length = new_text.len();
2449 let old_start = range.start.to_point(&before_edit);
2450 let new_start = (delta + range.start as isize) as usize;
2451 let range_len = range.end - range.start;
2452 delta += new_text_length as isize - range_len as isize;
2453
2454 // Decide what range of the insertion to auto-indent, and whether
2455 // the first line of the insertion should be considered a newly-inserted line
2456 // or an edit to an existing line.
2457 let mut range_of_insertion_to_indent = 0..new_text_length;
2458 let mut first_line_is_new = true;
2459
2460 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2461 let old_line_end = before_edit.line_len(old_start.row);
2462
2463 if old_start.column > old_line_start {
2464 first_line_is_new = false;
2465 }
2466
2467 if !new_text.contains('\n')
2468 && (old_start.column + (range_len as u32) < old_line_end
2469 || old_line_end == old_line_start)
2470 {
2471 first_line_is_new = false;
2472 }
2473
2474 // When inserting text starting with a newline, avoid auto-indenting the
2475 // previous line.
2476 if new_text.starts_with('\n') {
2477 range_of_insertion_to_indent.start += 1;
2478 first_line_is_new = true;
2479 }
2480
2481 let mut original_indent_column = None;
2482 if let AutoindentMode::Block {
2483 original_indent_columns,
2484 } = &mode
2485 {
2486 original_indent_column = Some(if new_text.starts_with('\n') {
2487 indent_size_for_text(
2488 new_text[range_of_insertion_to_indent.clone()].chars(),
2489 )
2490 .len
2491 } else {
2492 original_indent_columns
2493 .get(ix)
2494 .copied()
2495 .flatten()
2496 .unwrap_or_else(|| {
2497 indent_size_for_text(
2498 new_text[range_of_insertion_to_indent.clone()].chars(),
2499 )
2500 .len
2501 })
2502 });
2503
2504 // Avoid auto-indenting the line after the edit.
2505 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2506 range_of_insertion_to_indent.end -= 1;
2507 }
2508 }
2509
2510 AutoindentRequestEntry {
2511 first_line_is_new,
2512 original_indent_column,
2513 indent_size: before_edit.language_indent_size_at(range.start, cx),
2514 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2515 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2516 }
2517 })
2518 .collect();
2519
2520 if !entries.is_empty() {
2521 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2522 before_edit,
2523 entries,
2524 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2525 ignore_empty_lines: false,
2526 }));
2527 }
2528 }
2529
2530 self.end_transaction(cx);
2531 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2532 Some(edit_id)
2533 }
2534
2535 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2536 self.was_changed();
2537
2538 if self.edits_since::<usize>(old_version).next().is_none() {
2539 return;
2540 }
2541
2542 self.reparse(cx);
2543 cx.emit(BufferEvent::Edited);
2544 if was_dirty != self.is_dirty() {
2545 cx.emit(BufferEvent::DirtyChanged);
2546 }
2547 cx.notify();
2548 }
2549
2550 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2551 where
2552 I: IntoIterator<Item = Range<T>>,
2553 T: ToOffset + Copy,
2554 {
2555 let before_edit = self.snapshot();
2556 let entries = ranges
2557 .into_iter()
2558 .map(|range| AutoindentRequestEntry {
2559 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2560 first_line_is_new: true,
2561 indent_size: before_edit.language_indent_size_at(range.start, cx),
2562 original_indent_column: None,
2563 })
2564 .collect();
2565 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2566 before_edit,
2567 entries,
2568 is_block_mode: false,
2569 ignore_empty_lines: true,
2570 }));
2571 self.request_autoindent(cx);
2572 }
2573
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
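    ///
    /// A sketch:
    ///
    /// ```ignore
    /// // Create an empty, auto-indented line at the given position, requesting a
    /// // blank line above it if one is not already present.
    /// let start = buffer.insert_empty_line(Point::new(4, 0), true, false, cx);
    /// ```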
2576 pub fn insert_empty_line(
2577 &mut self,
2578 position: impl ToPoint,
2579 space_above: bool,
2580 space_below: bool,
2581 cx: &mut Context<Self>,
2582 ) -> Point {
2583 let mut position = position.to_point(self);
2584
2585 self.start_transaction();
2586
2587 self.edit(
2588 [(position..position, "\n")],
2589 Some(AutoindentMode::EachLine),
2590 cx,
2591 );
2592
2593 if position.column > 0 {
2594 position += Point::new(1, 0);
2595 }
2596
2597 if !self.is_line_blank(position.row) {
2598 self.edit(
2599 [(position..position, "\n")],
2600 Some(AutoindentMode::EachLine),
2601 cx,
2602 );
2603 }
2604
2605 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2606 self.edit(
2607 [(position..position, "\n")],
2608 Some(AutoindentMode::EachLine),
2609 cx,
2610 );
2611 position.row += 1;
2612 }
2613
2614 if space_below
2615 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2616 {
2617 self.edit(
2618 [(position..position, "\n")],
2619 Some(AutoindentMode::EachLine),
2620 cx,
2621 );
2622 }
2623
2624 self.end_transaction(cx);
2625
2626 position
2627 }
2628
2629 /// Applies the given remote operations to the buffer.
2630 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2631 self.pending_autoindent.take();
2632 let was_dirty = self.is_dirty();
2633 let old_version = self.version.clone();
2634 let mut deferred_ops = Vec::new();
2635 let buffer_ops = ops
2636 .into_iter()
2637 .filter_map(|op| match op {
2638 Operation::Buffer(op) => Some(op),
2639 _ => {
2640 if self.can_apply_op(&op) {
2641 self.apply_op(op, cx);
2642 } else {
2643 deferred_ops.push(op);
2644 }
2645 None
2646 }
2647 })
2648 .collect::<Vec<_>>();
2649 for operation in buffer_ops.iter() {
2650 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2651 }
2652 self.text.apply_ops(buffer_ops);
2653 self.deferred_ops.insert(deferred_ops);
2654 self.flush_deferred_ops(cx);
2655 self.did_edit(&old_version, was_dirty, cx);
2656 // Notify independently of whether the buffer was edited as the operations could include a
2657 // selection update.
2658 cx.notify();
2659 }
2660
2661 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2662 let mut deferred_ops = Vec::new();
2663 for op in self.deferred_ops.drain().iter().cloned() {
2664 if self.can_apply_op(&op) {
2665 self.apply_op(op, cx);
2666 } else {
2667 deferred_ops.push(op);
2668 }
2669 }
2670 self.deferred_ops.insert(deferred_ops);
2671 }
2672
2673 pub fn has_deferred_ops(&self) -> bool {
2674 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2675 }
2676
2677 fn can_apply_op(&self, operation: &Operation) -> bool {
2678 match operation {
2679 Operation::Buffer(_) => {
2680 unreachable!("buffer operations should never be applied at this layer")
2681 }
2682 Operation::UpdateDiagnostics {
2683 diagnostics: diagnostic_set,
2684 ..
2685 } => diagnostic_set.iter().all(|diagnostic| {
2686 self.text.can_resolve(&diagnostic.range.start)
2687 && self.text.can_resolve(&diagnostic.range.end)
2688 }),
2689 Operation::UpdateSelections { selections, .. } => selections
2690 .iter()
2691 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2692 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2693 }
2694 }
2695
2696 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2697 match operation {
2698 Operation::Buffer(_) => {
2699 unreachable!("buffer operations should never be applied at this layer")
2700 }
2701 Operation::UpdateDiagnostics {
2702 server_id,
2703 diagnostics: diagnostic_set,
2704 lamport_timestamp,
2705 } => {
2706 let snapshot = self.snapshot();
2707 self.apply_diagnostic_update(
2708 server_id,
2709 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2710 lamport_timestamp,
2711 cx,
2712 );
2713 }
2714 Operation::UpdateSelections {
2715 selections,
2716 lamport_timestamp,
2717 line_mode,
2718 cursor_shape,
2719 } => {
2720 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2721 && set.lamport_timestamp > lamport_timestamp
2722 {
2723 return;
2724 }
2725
2726 self.remote_selections.insert(
2727 lamport_timestamp.replica_id,
2728 SelectionSet {
2729 selections,
2730 lamport_timestamp,
2731 line_mode,
2732 cursor_shape,
2733 },
2734 );
2735 self.text.lamport_clock.observe(lamport_timestamp);
2736 self.non_text_state_update_count += 1;
2737 }
2738 Operation::UpdateCompletionTriggers {
2739 triggers,
2740 lamport_timestamp,
2741 server_id,
2742 } => {
2743 if triggers.is_empty() {
2744 self.completion_triggers_per_language_server
2745 .remove(&server_id);
2746 self.completion_triggers = self
2747 .completion_triggers_per_language_server
2748 .values()
2749 .flat_map(|triggers| triggers.iter().cloned())
2750 .collect();
2751 } else {
2752 self.completion_triggers_per_language_server
2753 .insert(server_id, triggers.iter().cloned().collect());
2754 self.completion_triggers.extend(triggers);
2755 }
2756 self.text.lamport_clock.observe(lamport_timestamp);
2757 }
2758 Operation::UpdateLineEnding {
2759 line_ending,
2760 lamport_timestamp,
2761 } => {
2762 self.text.set_line_ending(line_ending);
2763 self.text.lamport_clock.observe(lamport_timestamp);
2764 }
2765 }
2766 }
2767
2768 fn apply_diagnostic_update(
2769 &mut self,
2770 server_id: LanguageServerId,
2771 diagnostics: DiagnosticSet,
2772 lamport_timestamp: clock::Lamport,
2773 cx: &mut Context<Self>,
2774 ) {
2775 if lamport_timestamp > self.diagnostics_timestamp {
2776 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
2777 if diagnostics.is_empty() {
2778 if let Ok(ix) = ix {
2779 self.diagnostics.remove(ix);
2780 }
2781 } else {
2782 match ix {
2783 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
2784 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
2785 };
2786 }
2787 self.diagnostics_timestamp = lamport_timestamp;
2788 self.non_text_state_update_count += 1;
2789 self.text.lamport_clock.observe(lamport_timestamp);
2790 cx.notify();
2791 cx.emit(BufferEvent::DiagnosticsUpdated);
2792 }
2793 }
2794
2795 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2796 self.was_changed();
2797 cx.emit(BufferEvent::Operation {
2798 operation,
2799 is_local,
2800 });
2801 }
2802
2803 /// Removes the selections for a given peer.
2804 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2805 self.remote_selections.remove(&replica_id);
2806 cx.notify();
2807 }
2808
2809 /// Undoes the most recent transaction.
2810 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2811 let was_dirty = self.is_dirty();
2812 let old_version = self.version.clone();
2813
2814 if let Some((transaction_id, operation)) = self.text.undo() {
2815 self.send_operation(Operation::Buffer(operation), true, cx);
2816 self.did_edit(&old_version, was_dirty, cx);
2817 Some(transaction_id)
2818 } else {
2819 None
2820 }
2821 }
2822
2823 /// Manually undoes a specific transaction in the buffer's undo history.
2824 pub fn undo_transaction(
2825 &mut self,
2826 transaction_id: TransactionId,
2827 cx: &mut Context<Self>,
2828 ) -> bool {
2829 let was_dirty = self.is_dirty();
2830 let old_version = self.version.clone();
2831 if let Some(operation) = self.text.undo_transaction(transaction_id) {
2832 self.send_operation(Operation::Buffer(operation), true, cx);
2833 self.did_edit(&old_version, was_dirty, cx);
2834 true
2835 } else {
2836 false
2837 }
2838 }
2839
2840 /// Manually undoes all changes after a given transaction in the buffer's undo history.
2841 pub fn undo_to_transaction(
2842 &mut self,
2843 transaction_id: TransactionId,
2844 cx: &mut Context<Self>,
2845 ) -> bool {
2846 let was_dirty = self.is_dirty();
2847 let old_version = self.version.clone();
2848
2849 let operations = self.text.undo_to_transaction(transaction_id);
2850 let undone = !operations.is_empty();
2851 for operation in operations {
2852 self.send_operation(Operation::Buffer(operation), true, cx);
2853 }
2854 if undone {
2855 self.did_edit(&old_version, was_dirty, cx)
2856 }
2857 undone
2858 }
2859
2860 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
2861 let was_dirty = self.is_dirty();
2862 let operation = self.text.undo_operations(counts);
2863 let old_version = self.version.clone();
2864 self.send_operation(Operation::Buffer(operation), true, cx);
2865 self.did_edit(&old_version, was_dirty, cx);
2866 }
2867
    /// Redoes the most recently undone transaction.
2869 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2870 let was_dirty = self.is_dirty();
2871 let old_version = self.version.clone();
2872
2873 if let Some((transaction_id, operation)) = self.text.redo() {
2874 self.send_operation(Operation::Buffer(operation), true, cx);
2875 self.did_edit(&old_version, was_dirty, cx);
2876 Some(transaction_id)
2877 } else {
2878 None
2879 }
2880 }
2881
    /// Manually redoes all changes until a given transaction in the buffer's redo history.
2883 pub fn redo_to_transaction(
2884 &mut self,
2885 transaction_id: TransactionId,
2886 cx: &mut Context<Self>,
2887 ) -> bool {
2888 let was_dirty = self.is_dirty();
2889 let old_version = self.version.clone();
2890
2891 let operations = self.text.redo_to_transaction(transaction_id);
2892 let redone = !operations.is_empty();
2893 for operation in operations {
2894 self.send_operation(Operation::Buffer(operation), true, cx);
2895 }
2896 if redone {
2897 self.did_edit(&old_version, was_dirty, cx)
2898 }
2899 redone
2900 }
2901
    /// Overrides the current completion triggers for the given language server with the provided set.
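    ///
    /// A sketch (obtaining the `LanguageServerId` is elided here):
    ///
    /// ```ignore
    /// buffer.set_completion_triggers(
    ///     server_id,
    ///     BTreeSet::from([".".to_string(), "::".to_string()]),
    ///     cx,
    /// );
    /// ```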
2903 pub fn set_completion_triggers(
2904 &mut self,
2905 server_id: LanguageServerId,
2906 triggers: BTreeSet<String>,
2907 cx: &mut Context<Self>,
2908 ) {
2909 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
2910 if triggers.is_empty() {
2911 self.completion_triggers_per_language_server
2912 .remove(&server_id);
2913 self.completion_triggers = self
2914 .completion_triggers_per_language_server
2915 .values()
2916 .flat_map(|triggers| triggers.iter().cloned())
2917 .collect();
2918 } else {
2919 self.completion_triggers_per_language_server
2920 .insert(server_id, triggers.clone());
2921 self.completion_triggers.extend(triggers.iter().cloned());
2922 }
2923 self.send_operation(
2924 Operation::UpdateCompletionTriggers {
2925 triggers: triggers.into_iter().collect(),
2926 lamport_timestamp: self.completion_triggers_timestamp,
2927 server_id,
2928 },
2929 true,
2930 cx,
2931 );
2932 cx.notify();
2933 }
2934
    /// Returns the set of strings that trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
2937 pub fn completion_triggers(&self) -> &BTreeSet<String> {
2938 &self.completion_triggers
2939 }
2940
2941 /// Call this directly after performing edits to prevent the preview tab
2942 /// from being dismissed by those edits. It causes `should_dismiss_preview`
2943 /// to return false until there are additional edits.
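    ///
    /// A sketch:
    ///
    /// ```ignore
    /// buffer.edit([(0..0, "// generated\n")], None, cx);
    /// buffer.refresh_preview();
    /// assert!(buffer.preserve_preview()); // stays true until further edits arrive
    /// ```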
2944 pub fn refresh_preview(&mut self) {
2945 self.preview_version = self.version.clone();
2946 }
2947
2948 /// Whether we should preserve the preview status of a tab containing this buffer.
2949 pub fn preserve_preview(&self) -> bool {
2950 !self.has_edits_since(&self.preview_version)
2951 }
2952}
2953
2954#[doc(hidden)]
2955#[cfg(any(test, feature = "test-support"))]
2956impl Buffer {
2957 pub fn edit_via_marked_text(
2958 &mut self,
2959 marked_string: &str,
2960 autoindent_mode: Option<AutoindentMode>,
2961 cx: &mut Context<Self>,
2962 ) {
2963 let edits = self.edits_for_marked_text(marked_string);
2964 self.edit(edits, autoindent_mode, cx);
2965 }
2966
2967 pub fn set_group_interval(&mut self, group_interval: Duration) {
2968 self.text.set_group_interval(group_interval);
2969 }
2970
2971 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
2972 where
2973 T: rand::Rng,
2974 {
2975 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
2976 let mut last_end = None;
2977 for _ in 0..old_range_count {
2978 if last_end.is_some_and(|last_end| last_end >= self.len()) {
2979 break;
2980 }
2981
2982 let new_start = last_end.map_or(0, |last_end| last_end + 1);
2983 let mut range = self.random_byte_range(new_start, rng);
2984 if rng.random_bool(0.2) {
2985 mem::swap(&mut range.start, &mut range.end);
2986 }
2987 last_end = Some(range.end);
2988
2989 let new_text_len = rng.random_range(0..10);
2990 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
2991 new_text = new_text.to_uppercase();
2992
2993 edits.push((range, new_text));
2994 }
2995 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
2996 self.edit(edits, None, cx);
2997 }
2998
2999 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3000 let was_dirty = self.is_dirty();
3001 let old_version = self.version.clone();
3002
3003 let ops = self.text.randomly_undo_redo(rng);
3004 if !ops.is_empty() {
3005 for op in ops {
3006 self.send_operation(Operation::Buffer(op), true, cx);
3007 self.did_edit(&old_version, was_dirty, cx);
3008 }
3009 }
3010 }
3011}
3012
3013impl EventEmitter<BufferEvent> for Buffer {}
3014
3015impl Deref for Buffer {
3016 type Target = TextBuffer;
3017
3018 fn deref(&self) -> &Self::Target {
3019 &self.text
3020 }
3021}
3022
3023impl BufferSnapshot {
3024 /// Returns [`IndentSize`] for a given line that respects user settings and
3025 /// language preferences.
3026 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3027 indent_size_for_line(self, row)
3028 }
3029
3030 /// Returns [`IndentSize`] for a given position that respects user settings
3031 /// and language preferences.
3032 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3033 let settings = language_settings(
3034 self.language_at(position).map(|l| l.name()),
3035 self.file(),
3036 cx,
3037 );
3038 if settings.hard_tabs {
3039 IndentSize::tab()
3040 } else {
3041 IndentSize::spaces(settings.tab_size.get())
3042 }
3043 }
3044
3045 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3046 /// is passed in as `single_indent_size`.
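    ///
    /// A sketch, assuming `snapshot` is a [`BufferSnapshot`]:
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(2..6, IndentSize::spaces(4));
    /// for (row, size) in indents {
    ///     println!("row {row}: suggested indent of {} columns", size.len);
    /// }
    /// ```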
3047 pub fn suggested_indents(
3048 &self,
3049 rows: impl Iterator<Item = u32>,
3050 single_indent_size: IndentSize,
3051 ) -> BTreeMap<u32, IndentSize> {
3052 let mut result = BTreeMap::new();
3053
3054 for row_range in contiguous_ranges(rows, 10) {
3055 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3056 Some(suggestions) => suggestions,
3057 _ => break,
3058 };
3059
3060 for (row, suggestion) in row_range.zip(suggestions) {
3061 let indent_size = if let Some(suggestion) = suggestion {
3062 result
3063 .get(&suggestion.basis_row)
3064 .copied()
3065 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3066 .with_delta(suggestion.delta, single_indent_size)
3067 } else {
3068 self.indent_size_for_line(row)
3069 };
3070
3071 result.insert(row, indent_size);
3072 }
3073 }
3074
3075 result
3076 }
3077
3078 fn suggest_autoindents(
3079 &self,
3080 row_range: Range<u32>,
3081 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3082 let config = &self.language.as_ref()?.config;
3083 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3084
3085 #[derive(Debug, Clone)]
3086 struct StartPosition {
3087 start: Point,
3088 suffix: SharedString,
3089 }
3090
3091 // Find the suggested indentation ranges based on the syntax tree.
3092 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3093 let end = Point::new(row_range.end, 0);
3094 let range = (start..end).to_offset(&self.text);
3095 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3096 Some(&grammar.indents_config.as_ref()?.query)
3097 });
3098 let indent_configs = matches
3099 .grammars()
3100 .iter()
3101 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3102 .collect::<Vec<_>>();
3103
3104 let mut indent_ranges = Vec::<Range<Point>>::new();
3105 let mut start_positions = Vec::<StartPosition>::new();
3106 let mut outdent_positions = Vec::<Point>::new();
3107 while let Some(mat) = matches.peek() {
3108 let mut start: Option<Point> = None;
3109 let mut end: Option<Point> = None;
3110
3111 let config = indent_configs[mat.grammar_index];
3112 for capture in mat.captures {
3113 if capture.index == config.indent_capture_ix {
3114 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3115 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3116 } else if Some(capture.index) == config.start_capture_ix {
3117 start = Some(Point::from_ts_point(capture.node.end_position()));
3118 } else if Some(capture.index) == config.end_capture_ix {
3119 end = Some(Point::from_ts_point(capture.node.start_position()));
3120 } else if Some(capture.index) == config.outdent_capture_ix {
3121 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3122 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3123 start_positions.push(StartPosition {
3124 start: Point::from_ts_point(capture.node.start_position()),
3125 suffix: suffix.clone(),
3126 });
3127 }
3128 }
3129
3130 matches.advance();
3131 if let Some((start, end)) = start.zip(end) {
3132 if start.row == end.row {
3133 continue;
3134 }
3135 let range = start..end;
3136 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3137 Err(ix) => indent_ranges.insert(ix, range),
3138 Ok(ix) => {
3139 let prev_range = &mut indent_ranges[ix];
3140 prev_range.end = prev_range.end.max(range.end);
3141 }
3142 }
3143 }
3144 }
3145
3146 let mut error_ranges = Vec::<Range<Point>>::new();
3147 let mut matches = self
3148 .syntax
3149 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3150 while let Some(mat) = matches.peek() {
3151 let node = mat.captures[0].node;
3152 let start = Point::from_ts_point(node.start_position());
3153 let end = Point::from_ts_point(node.end_position());
3154 let range = start..end;
3155 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3156 Ok(ix) | Err(ix) => ix,
3157 };
3158 let mut end_ix = ix;
3159 while let Some(existing_range) = error_ranges.get(end_ix) {
3160 if existing_range.end < end {
3161 end_ix += 1;
3162 } else {
3163 break;
3164 }
3165 }
3166 error_ranges.splice(ix..end_ix, [range]);
3167 matches.advance();
3168 }
3169
3170 outdent_positions.sort();
3171 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position
            // and set its end to the outdent position.
3174 if let Some(range_to_truncate) = indent_ranges
3175 .iter_mut()
3176 .filter(|indent_range| indent_range.contains(&outdent_position))
3177 .next_back()
3178 {
3179 range_to_truncate.end = outdent_position;
3180 }
3181 }
3182
3183 start_positions.sort_by_key(|b| b.start);
3184
        // Find the suggested indentation increases and decreases based on regexes.
3186 let mut regex_outdent_map = HashMap::default();
3187 let mut last_seen_suffix: HashMap<String, Vec<Point>> = HashMap::default();
3188 let mut start_positions_iter = start_positions.iter().peekable();
3189
3190 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3191 self.for_each_line(
3192 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3193 ..Point::new(row_range.end, 0),
3194 |row, line| {
3195 if config
3196 .decrease_indent_pattern
3197 .as_ref()
3198 .is_some_and(|regex| regex.is_match(line))
3199 {
3200 indent_change_rows.push((row, Ordering::Less));
3201 }
3202 if config
3203 .increase_indent_pattern
3204 .as_ref()
3205 .is_some_and(|regex| regex.is_match(line))
3206 {
3207 indent_change_rows.push((row + 1, Ordering::Greater));
3208 }
3209 while let Some(pos) = start_positions_iter.peek() {
3210 if pos.start.row < row {
3211 let pos = start_positions_iter.next().unwrap();
3212 last_seen_suffix
3213 .entry(pos.suffix.to_string())
3214 .or_default()
3215 .push(pos.start);
3216 } else {
3217 break;
3218 }
3219 }
3220 for rule in &config.decrease_indent_patterns {
3221 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3222 let row_start_column = self.indent_size_for_line(row).len;
3223 let basis_row = rule
3224 .valid_after
3225 .iter()
3226 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3227 .flatten()
3228 .filter(|start_point| start_point.column <= row_start_column)
3229 .max_by_key(|start_point| start_point.row);
3230 if let Some(outdent_to_row) = basis_row {
3231 regex_outdent_map.insert(row, outdent_to_row.row);
3232 }
3233 break;
3234 }
3235 }
3236 },
3237 );
3238
3239 let mut indent_changes = indent_change_rows.into_iter().peekable();
3240 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3241 prev_non_blank_row.unwrap_or(0)
3242 } else {
3243 row_range.start.saturating_sub(1)
3244 };
3245
3246 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3247 Some(row_range.map(move |row| {
3248 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3249
3250 let mut indent_from_prev_row = false;
3251 let mut outdent_from_prev_row = false;
3252 let mut outdent_to_row = u32::MAX;
3253 let mut from_regex = false;
3254
3255 while let Some((indent_row, delta)) = indent_changes.peek() {
3256 match indent_row.cmp(&row) {
3257 Ordering::Equal => match delta {
3258 Ordering::Less => {
3259 from_regex = true;
3260 outdent_from_prev_row = true
3261 }
3262 Ordering::Greater => {
3263 indent_from_prev_row = true;
3264 from_regex = true
3265 }
3266 _ => {}
3267 },
3268
3269 Ordering::Greater => break,
3270 Ordering::Less => {}
3271 }
3272
3273 indent_changes.next();
3274 }
3275
3276 for range in &indent_ranges {
3277 if range.start.row >= row {
3278 break;
3279 }
3280 if range.start.row == prev_row && range.end > row_start {
3281 indent_from_prev_row = true;
3282 }
3283 if range.end > prev_row_start && range.end <= row_start {
3284 outdent_to_row = outdent_to_row.min(range.start.row);
3285 }
3286 }
3287
3288 if let Some(basis_row) = regex_outdent_map.get(&row) {
3289 indent_from_prev_row = false;
3290 outdent_to_row = *basis_row;
3291 from_regex = true;
3292 }
3293
3294 let within_error = error_ranges
3295 .iter()
3296 .any(|e| e.start.row < row && e.end > row_start);
3297
3298 let suggestion = if outdent_to_row == prev_row
3299 || (outdent_from_prev_row && indent_from_prev_row)
3300 {
3301 Some(IndentSuggestion {
3302 basis_row: prev_row,
3303 delta: Ordering::Equal,
3304 within_error: within_error && !from_regex,
3305 })
3306 } else if indent_from_prev_row {
3307 Some(IndentSuggestion {
3308 basis_row: prev_row,
3309 delta: Ordering::Greater,
3310 within_error: within_error && !from_regex,
3311 })
3312 } else if outdent_to_row < prev_row {
3313 Some(IndentSuggestion {
3314 basis_row: outdent_to_row,
3315 delta: Ordering::Equal,
3316 within_error: within_error && !from_regex,
3317 })
3318 } else if outdent_from_prev_row {
3319 Some(IndentSuggestion {
3320 basis_row: prev_row,
3321 delta: Ordering::Less,
3322 within_error: within_error && !from_regex,
3323 })
3324 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3325 {
3326 Some(IndentSuggestion {
3327 basis_row: prev_row,
3328 delta: Ordering::Equal,
3329 within_error: within_error && !from_regex,
3330 })
3331 } else {
3332 None
3333 };
3334
3335 prev_row = row;
3336 prev_row_start = row_start;
3337 suggestion
3338 }))
3339 }
3340
3341 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3342 while row > 0 {
3343 row -= 1;
3344 if !self.is_line_blank(row) {
3345 return Some(row);
3346 }
3347 }
3348 None
3349 }
3350
3351 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3352 let captures = self.syntax.captures(range, &self.text, |grammar| {
3353 grammar
3354 .highlights_config
3355 .as_ref()
3356 .map(|config| &config.query)
3357 });
3358 let highlight_maps = captures
3359 .grammars()
3360 .iter()
3361 .map(|grammar| grammar.highlight_map())
3362 .collect();
3363 (captures, highlight_maps)
3364 }
3365
3366 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3367 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3368 /// returned in chunks where each chunk has a single syntax highlighting style and
3369 /// diagnostic status.
3370 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3371 let range = range.start.to_offset(self)..range.end.to_offset(self);
3372
3373 let mut syntax = None;
3374 if language_aware {
3375 syntax = Some(self.get_highlights(range.clone()));
3376 }
3377 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3378 let diagnostics = language_aware;
3379 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3380 }
3381
3382 pub fn highlighted_text_for_range<T: ToOffset>(
3383 &self,
3384 range: Range<T>,
3385 override_style: Option<HighlightStyle>,
3386 syntax_theme: &SyntaxTheme,
3387 ) -> HighlightedText {
3388 HighlightedText::from_buffer_range(
3389 range,
3390 &self.text,
3391 &self.syntax,
3392 override_style,
3393 syntax_theme,
3394 )
3395 }
3396
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a string for each line.
3399 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3400 let mut line = String::new();
3401 let mut row = range.start.row;
3402 for chunk in self
3403 .as_rope()
3404 .chunks_in_range(range.to_offset(self))
3405 .chain(["\n"])
3406 {
3407 for (newline_ix, text) in chunk.split('\n').enumerate() {
3408 if newline_ix > 0 {
3409 callback(row, &line);
3410 row += 1;
3411 line.clear();
3412 }
3413 line.push_str(text);
3414 }
3415 }
3416 }
3417
3418 /// Iterates over every [`SyntaxLayer`] in the buffer.
3419 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3420 self.syntax_layers_for_range(0..self.len(), true)
3421 }
3422
3423 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3424 let offset = position.to_offset(self);
3425 self.syntax_layers_for_range(offset..offset, false)
3426 .filter(|l| {
3427 if let Some(ranges) = l.included_sub_ranges {
3428 ranges.iter().any(|range| {
3429 let start = range.start.to_offset(self);
3430 start <= offset && {
3431 let end = range.end.to_offset(self);
3432 offset < end
3433 }
3434 })
3435 } else {
3436 l.node().start_byte() <= offset && l.node().end_byte() > offset
3437 }
3438 })
3439 .last()
3440 }
3441
3442 pub fn syntax_layers_for_range<D: ToOffset>(
3443 &self,
3444 range: Range<D>,
3445 include_hidden: bool,
3446 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3447 self.syntax
3448 .layers_for_range(range, &self.text, include_hidden)
3449 }
3450
3451 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3452 &self,
3453 range: Range<D>,
3454 ) -> Option<SyntaxLayer<'_>> {
3455 let range = range.to_offset(self);
3456 self.syntax
3457 .layers_for_range(range, &self.text, false)
3458 .max_by(|a, b| {
3459 if a.depth != b.depth {
3460 a.depth.cmp(&b.depth)
3461 } else if a.offset.0 != b.offset.0 {
3462 a.offset.0.cmp(&b.offset.0)
3463 } else {
3464 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3465 }
3466 })
3467 }
3468
3469 /// Returns the main [`Language`].
3470 pub fn language(&self) -> Option<&Arc<Language>> {
3471 self.language.as_ref()
3472 }
3473
3474 /// Returns the [`Language`] at the given location.
3475 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3476 self.syntax_layer_at(position)
3477 .map(|info| info.language)
3478 .or(self.language.as_ref())
3479 }
3480
3481 /// Returns the settings for the language at the given location.
3482 pub fn settings_at<'a, D: ToOffset>(
3483 &'a self,
3484 position: D,
3485 cx: &'a App,
3486 ) -> Cow<'a, LanguageSettings> {
3487 language_settings(
3488 self.language_at(position).map(|l| l.name()),
3489 self.file.as_ref(),
3490 cx,
3491 )
3492 }
3493
3494 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3495 CharClassifier::new(self.language_scope_at(point))
3496 }
3497
3498 /// Returns the [`LanguageScope`] at the given location.
3499 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3500 let offset = position.to_offset(self);
3501 let mut scope = None;
3502 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3503
3504 // Use the layer that has the smallest node intersecting the given point.
3505 for layer in self
3506 .syntax
3507 .layers_for_range(offset..offset, &self.text, false)
3508 {
3509 let mut cursor = layer.node().walk();
3510
3511 let mut range = None;
3512 loop {
3513 let child_range = cursor.node().byte_range();
3514 if !child_range.contains(&offset) {
3515 break;
3516 }
3517
3518 range = Some(child_range);
3519 if cursor.goto_first_child_for_byte(offset).is_none() {
3520 break;
3521 }
3522 }
3523
3524 if let Some(range) = range
3525 && smallest_range_and_depth.as_ref().is_none_or(
3526 |(smallest_range, smallest_range_depth)| {
3527 if layer.depth > *smallest_range_depth {
3528 true
3529 } else if layer.depth == *smallest_range_depth {
3530 range.len() < smallest_range.len()
3531 } else {
3532 false
3533 }
3534 },
3535 )
3536 {
3537 smallest_range_and_depth = Some((range, layer.depth));
3538 scope = Some(LanguageScope {
3539 language: layer.language.clone(),
3540 override_id: layer.override_id(offset, &self.text),
3541 });
3542 }
3543 }
3544
3545 scope.or_else(|| {
3546 self.language.clone().map(|language| LanguageScope {
3547 language,
3548 override_id: None,
3549 })
3550 })
3551 }
3552
3553 /// Returns a tuple of the range and character kind of the word
3554 /// surrounding the given position.
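    ///
    /// A hypothetical usage sketch (the offset and `snapshot` binding are assumptions):
    ///
    /// ```ignore
    /// let (word_range, kind) = snapshot.surrounding_word(10, None);
    /// if kind == Some(CharKind::Word) {
    ///     let word: String = snapshot.text_for_range(word_range).collect();
    /// }
    /// ```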
3555 pub fn surrounding_word<T: ToOffset>(
3556 &self,
3557 start: T,
3558 scope_context: Option<CharScopeContext>,
3559 ) -> (Range<usize>, Option<CharKind>) {
3560 let mut start = start.to_offset(self);
3561 let mut end = start;
3562 let mut next_chars = self.chars_at(start).take(128).peekable();
3563 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3564
3565 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3566 let word_kind = cmp::max(
3567 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3568 next_chars.peek().copied().map(|c| classifier.kind(c)),
3569 );
3570
3571 for ch in prev_chars {
3572 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3573 start -= ch.len_utf8();
3574 } else {
3575 break;
3576 }
3577 }
3578
3579 for ch in next_chars {
3580 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3581 end += ch.len_utf8();
3582 } else {
3583 break;
3584 }
3585 }
3586
3587 (start..end, word_kind)
3588 }
3589
3590 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3591 /// range. When `require_larger` is true, the node found must be larger than the query range.
3592 ///
3593 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3594 /// be moved to the root of the tree.
3595 fn goto_node_enclosing_range(
3596 cursor: &mut tree_sitter::TreeCursor,
3597 query_range: &Range<usize>,
3598 require_larger: bool,
3599 ) -> bool {
3600 let mut ascending = false;
3601 loop {
3602 let mut range = cursor.node().byte_range();
3603 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
3606 if range.start > query_range.start {
3607 cursor.goto_previous_sibling();
3608 range = cursor.node().byte_range();
3609 }
3610 } else {
3611 // When the query range is non-empty and the current node ends exactly at the start,
3612 // move to the next sibling to find a node that extends beyond the start.
3613 if range.end == query_range.start {
3614 cursor.goto_next_sibling();
3615 range = cursor.node().byte_range();
3616 }
3617 }
3618
3619 let encloses = range.contains_inclusive(query_range)
3620 && (!require_larger || range.len() > query_range.len());
3621 if !encloses {
3622 ascending = true;
3623 if !cursor.goto_parent() {
3624 return false;
3625 }
3626 continue;
3627 } else if ascending {
3628 return true;
3629 }
3630
3631 // Descend into the current node.
3632 if cursor
3633 .goto_first_child_for_byte(query_range.start)
3634 .is_none()
3635 {
3636 return true;
3637 }
3638 }
3639 }
3640
3641 pub fn syntax_ancestor<'a, T: ToOffset>(
3642 &'a self,
3643 range: Range<T>,
3644 ) -> Option<tree_sitter::Node<'a>> {
3645 let range = range.start.to_offset(self)..range.end.to_offset(self);
3646 let mut result: Option<tree_sitter::Node<'a>> = None;
3647 for layer in self
3648 .syntax
3649 .layers_for_range(range.clone(), &self.text, true)
3650 {
3651 let mut cursor = layer.node().walk();
3652
3653 // Find the node that both contains the range and is larger than it.
3654 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3655 continue;
3656 }
3657
3658 let left_node = cursor.node();
3659 let mut layer_result = left_node;
3660
3661 // For an empty range, try to find another node immediately to the right of the range.
3662 if left_node.end_byte() == range.start {
3663 let mut right_node = None;
3664 while !cursor.goto_next_sibling() {
3665 if !cursor.goto_parent() {
3666 break;
3667 }
3668 }
3669
3670 while cursor.node().start_byte() == range.start {
3671 right_node = Some(cursor.node());
3672 if !cursor.goto_first_child() {
3673 break;
3674 }
3675 }
3676
3677 // If there is a candidate node on both sides of the (empty) range, then
3678 // decide between the two by favoring a named node over an anonymous token.
3679 // If both nodes are the same in that regard, favor the right one.
3680 if let Some(right_node) = right_node
3681 && (right_node.is_named() || !left_node.is_named())
3682 {
3683 layer_result = right_node;
3684 }
3685 }
3686
3687 if let Some(previous_result) = &result
3688 && previous_result.byte_range().len() < layer_result.byte_range().len()
3689 {
3690 continue;
3691 }
3692 result = Some(layer_result);
3693 }
3694
3695 result
3696 }
3697
3698 /// Find the previous sibling syntax node at the given range.
3699 ///
3700 /// This function locates the syntax node that precedes the node containing
3701 /// the given range. It searches hierarchically by:
3702 /// 1. Finding the node that contains the given range
3703 /// 2. Looking for the previous sibling at the same tree level
3704 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3705 ///
3706 /// Returns `None` if there is no previous sibling at any ancestor level.
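    ///
    /// A hypothetical usage sketch (the byte offsets are assumptions):
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_prev_sibling(10..20) {
    ///     println!("previous sibling node kind: {}", node.kind());
    /// }
    /// ```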
3707 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3708 &'a self,
3709 range: Range<T>,
3710 ) -> Option<tree_sitter::Node<'a>> {
3711 let range = range.start.to_offset(self)..range.end.to_offset(self);
3712 let mut result: Option<tree_sitter::Node<'a>> = None;
3713
3714 for layer in self
3715 .syntax
3716 .layers_for_range(range.clone(), &self.text, true)
3717 {
3718 let mut cursor = layer.node().walk();
3719
3720 // Find the node that contains the range
3721 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3722 continue;
3723 }
3724
3725 // Look for the previous sibling, moving up ancestor levels if needed
3726 loop {
3727 if cursor.goto_previous_sibling() {
3728 let layer_result = cursor.node();
3729
3730 if let Some(previous_result) = &result {
3731 if previous_result.byte_range().end < layer_result.byte_range().end {
3732 continue;
3733 }
3734 }
3735 result = Some(layer_result);
3736 break;
3737 }
3738
3739 // No sibling found at this level, try moving up to parent
3740 if !cursor.goto_parent() {
3741 break;
3742 }
3743 }
3744 }
3745
3746 result
3747 }
3748
3749 /// Find the next sibling syntax node at the given range.
3750 ///
3751 /// This function locates the syntax node that follows the node containing
3752 /// the given range. It searches hierarchically by:
3753 /// 1. Finding the node that contains the given range
3754 /// 2. Looking for the next sibling at the same tree level
3755 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3756 ///
3757 /// Returns `None` if there is no next sibling at any ancestor level.
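    ///
    /// A hypothetical usage sketch (the byte offsets are assumptions):
    ///
    /// ```ignore
    /// if let Some(node) = snapshot.syntax_next_sibling(10..20) {
    ///     println!("next sibling node kind: {}", node.kind());
    /// }
    /// ```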
3758 pub fn syntax_next_sibling<'a, T: ToOffset>(
3759 &'a self,
3760 range: Range<T>,
3761 ) -> Option<tree_sitter::Node<'a>> {
3762 let range = range.start.to_offset(self)..range.end.to_offset(self);
3763 let mut result: Option<tree_sitter::Node<'a>> = None;
3764
3765 for layer in self
3766 .syntax
3767 .layers_for_range(range.clone(), &self.text, true)
3768 {
3769 let mut cursor = layer.node().walk();
3770
3771 // Find the node that contains the range
3772 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3773 continue;
3774 }
3775
3776 // Look for the next sibling, moving up ancestor levels if needed
3777 loop {
3778 if cursor.goto_next_sibling() {
3779 let layer_result = cursor.node();
3780
3781 if let Some(previous_result) = &result {
3782 if previous_result.byte_range().start > layer_result.byte_range().start {
3783 continue;
3784 }
3785 }
3786 result = Some(layer_result);
3787 break;
3788 }
3789
3790 // No sibling found at this level, try moving up to parent
3791 if !cursor.goto_parent() {
3792 break;
3793 }
3794 }
3795 }
3796
3797 result
3798 }
3799
3800 /// Returns the root syntax node within the given row
3801 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
3802 let start_offset = position.to_offset(self);
3803
3804 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
3805
3806 let layer = self
3807 .syntax
3808 .layers_for_range(start_offset..start_offset, &self.text, true)
3809 .next()?;
3810
3811 let mut cursor = layer.node().walk();
3812
3813 // Descend to the first leaf that touches the start of the range.
3814 while cursor.goto_first_child_for_byte(start_offset).is_some() {
3815 if cursor.node().end_byte() == start_offset {
3816 cursor.goto_next_sibling();
3817 }
3818 }
3819
3820 // Ascend to the root node within the same row.
3821 while cursor.goto_parent() {
3822 if cursor.node().start_position().row != row {
3823 break;
3824 }
3825 }
3826
3827 Some(cursor.node())
3828 }
3829
3830 /// Returns the outline for the buffer.
3831 ///
3832 /// This method allows passing an optional [`SyntaxTheme`] to
3833 /// syntax-highlight the returned symbols.
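    ///
    /// A hypothetical usage sketch (no highlighting theme is supplied):
    ///
    /// ```ignore
    /// let outline = snapshot.outline(None);
    /// ```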
3834 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
3835 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
3836 }
3837
3838 /// Returns all the symbols that contain the given position.
3839 ///
3840 /// This method allows passing an optional [`SyntaxTheme`] to
3841 /// syntax-highlight the returned symbols.
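    ///
    /// A hypothetical usage sketch (the byte offset is an assumption):
    ///
    /// ```ignore
    /// for item in snapshot.symbols_containing(42, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```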
3842 pub fn symbols_containing<T: ToOffset>(
3843 &self,
3844 position: T,
3845 theme: Option<&SyntaxTheme>,
3846 ) -> Vec<OutlineItem<Anchor>> {
3847 let position = position.to_offset(self);
3848 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
3849 let end = self.clip_offset(position + 1, Bias::Right);
3850 let mut items = self.outline_items_containing(start..end, false, theme);
3851 let mut prev_depth = None;
3852 items.retain(|item| {
3853 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
3854 prev_depth = Some(item.depth);
3855 result
3856 });
3857 items
3858 }
3859
3860 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
3861 let range = range.to_offset(self);
3862 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3863 grammar.outline_config.as_ref().map(|c| &c.query)
3864 });
3865 let configs = matches
3866 .grammars()
3867 .iter()
3868 .map(|g| g.outline_config.as_ref().unwrap())
3869 .collect::<Vec<_>>();
3870
3871 while let Some(mat) = matches.peek() {
3872 let config = &configs[mat.grammar_index];
3873 let containing_item_node = maybe!({
3874 let item_node = mat.captures.iter().find_map(|cap| {
3875 if cap.index == config.item_capture_ix {
3876 Some(cap.node)
3877 } else {
3878 None
3879 }
3880 })?;
3881
3882 let item_byte_range = item_node.byte_range();
3883 if item_byte_range.end < range.start || item_byte_range.start > range.end {
3884 None
3885 } else {
3886 Some(item_node)
3887 }
3888 });
3889
3890 if let Some(item_node) = containing_item_node {
3891 return Some(
3892 Point::from_ts_point(item_node.start_position())
3893 ..Point::from_ts_point(item_node.end_position()),
3894 );
3895 }
3896
3897 matches.advance();
3898 }
3899 None
3900 }
3901
3902 pub fn outline_items_containing<T: ToOffset>(
3903 &self,
3904 range: Range<T>,
3905 include_extra_context: bool,
3906 theme: Option<&SyntaxTheme>,
3907 ) -> Vec<OutlineItem<Anchor>> {
3908 self.outline_items_containing_internal(
3909 range,
3910 include_extra_context,
3911 theme,
3912 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
3913 )
3914 }
3915
3916 pub fn outline_items_as_points_containing<T: ToOffset>(
3917 &self,
3918 range: Range<T>,
3919 include_extra_context: bool,
3920 theme: Option<&SyntaxTheme>,
3921 ) -> Vec<OutlineItem<Point>> {
3922 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
3923 range
3924 })
3925 }
3926
3927 fn outline_items_containing_internal<T: ToOffset, U>(
3928 &self,
3929 range: Range<T>,
3930 include_extra_context: bool,
3931 theme: Option<&SyntaxTheme>,
3932 range_callback: fn(&Self, Range<Point>) -> Range<U>,
3933 ) -> Vec<OutlineItem<U>> {
3934 let range = range.to_offset(self);
3935 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
3936 grammar.outline_config.as_ref().map(|c| &c.query)
3937 });
3938
3939 let mut items = Vec::new();
3940 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
3941 while let Some(mat) = matches.peek() {
3942 let config = matches.grammars()[mat.grammar_index]
3943 .outline_config
3944 .as_ref()
3945 .unwrap();
3946 if let Some(item) =
3947 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
3948 {
3949 items.push(item);
3950 } else if let Some(capture) = mat
3951 .captures
3952 .iter()
3953 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
3954 {
3955 let capture_range = capture.node.start_position()..capture.node.end_position();
3956 let mut capture_row_range =
3957 capture_range.start.row as u32..capture_range.end.row as u32;
3958 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
3959 {
3960 capture_row_range.end -= 1;
3961 }
3962 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
3963 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
3964 last_row_range.end = capture_row_range.end;
3965 } else {
3966 annotation_row_ranges.push(capture_row_range);
3967 }
3968 } else {
3969 annotation_row_ranges.push(capture_row_range);
3970 }
3971 }
3972 matches.advance();
3973 }
3974
3975 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
3976
3977 // Assign depths based on containment relationships and convert to anchors.
3978 let mut item_ends_stack = Vec::<Point>::new();
3979 let mut anchor_items = Vec::new();
3980 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
3981 for item in items {
3982 while let Some(last_end) = item_ends_stack.last().copied() {
3983 if last_end < item.range.end {
3984 item_ends_stack.pop();
3985 } else {
3986 break;
3987 }
3988 }
3989
3990 let mut annotation_row_range = None;
3991 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
3992 let row_preceding_item = item.range.start.row.saturating_sub(1);
3993 if next_annotation_row_range.end < row_preceding_item {
3994 annotation_row_ranges.next();
3995 } else {
3996 if next_annotation_row_range.end == row_preceding_item {
3997 annotation_row_range = Some(next_annotation_row_range.clone());
3998 annotation_row_ranges.next();
3999 }
4000 break;
4001 }
4002 }
4003
4004 anchor_items.push(OutlineItem {
4005 depth: item_ends_stack.len(),
4006 range: range_callback(self, item.range.clone()),
4007 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4008 text: item.text,
4009 highlight_ranges: item.highlight_ranges,
4010 name_ranges: item.name_ranges,
4011 body_range: item.body_range.map(|r| range_callback(self, r)),
4012 annotation_range: annotation_row_range.map(|annotation_range| {
4013 let point_range = Point::new(annotation_range.start, 0)
4014 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4015 range_callback(self, point_range)
4016 }),
4017 });
4018 item_ends_stack.push(item.range.end);
4019 }
4020
4021 anchor_items
4022 }
4023
4024 fn next_outline_item(
4025 &self,
4026 config: &OutlineConfig,
4027 mat: &SyntaxMapMatch,
4028 range: &Range<usize>,
4029 include_extra_context: bool,
4030 theme: Option<&SyntaxTheme>,
4031 ) -> Option<OutlineItem<Point>> {
4032 let item_node = mat.captures.iter().find_map(|cap| {
4033 if cap.index == config.item_capture_ix {
4034 Some(cap.node)
4035 } else {
4036 None
4037 }
4038 })?;
4039
4040 let item_byte_range = item_node.byte_range();
4041 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4042 return None;
4043 }
4044 let item_point_range = Point::from_ts_point(item_node.start_position())
4045 ..Point::from_ts_point(item_node.end_position());
4046
4047 let mut open_point = None;
4048 let mut close_point = None;
4049
4050 let mut buffer_ranges = Vec::new();
4051 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4052 let mut range = node.start_byte()..node.end_byte();
4053 let start = node.start_position();
4054 if node.end_position().row > start.row {
4055 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4056 }
4057
4058 if !range.is_empty() {
4059 buffer_ranges.push((range, node_is_name));
4060 }
4061 };
4062
4063 for capture in mat.captures {
4064 if capture.index == config.name_capture_ix {
4065 add_to_buffer_ranges(capture.node, true);
4066 } else if Some(capture.index) == config.context_capture_ix
4067 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4068 {
4069 add_to_buffer_ranges(capture.node, false);
4070 } else {
4071 if Some(capture.index) == config.open_capture_ix {
4072 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4073 } else if Some(capture.index) == config.close_capture_ix {
4074 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4075 }
4076 }
4077 }
4078
4079 if buffer_ranges.is_empty() {
4080 return None;
4081 }
4082 let source_range_for_text =
4083 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4084
4085 let mut text = String::new();
4086 let mut highlight_ranges = Vec::new();
4087 let mut name_ranges = Vec::new();
4088 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4089 let mut last_buffer_range_end = 0;
4090 for (buffer_range, is_name) in buffer_ranges {
4091 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4092 if space_added {
4093 text.push(' ');
4094 }
4095 let before_append_len = text.len();
4096 let mut offset = buffer_range.start;
4097 chunks.seek(buffer_range.clone());
4098 for mut chunk in chunks.by_ref() {
4099 if chunk.text.len() > buffer_range.end - offset {
4100 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4101 offset = buffer_range.end;
4102 } else {
4103 offset += chunk.text.len();
4104 }
4105 let style = chunk
4106 .syntax_highlight_id
4107 .zip(theme)
4108 .and_then(|(highlight, theme)| highlight.style(theme));
4109 if let Some(style) = style {
4110 let start = text.len();
4111 let end = start + chunk.text.len();
4112 highlight_ranges.push((start..end, style));
4113 }
4114 text.push_str(chunk.text);
4115 if offset >= buffer_range.end {
4116 break;
4117 }
4118 }
4119 if is_name {
4120 let after_append_len = text.len();
4121 let start = if space_added && !name_ranges.is_empty() {
4122 before_append_len - 1
4123 } else {
4124 before_append_len
4125 };
4126 name_ranges.push(start..after_append_len);
4127 }
4128 last_buffer_range_end = buffer_range.end;
4129 }
4130
4131 Some(OutlineItem {
4132 depth: 0, // We'll calculate the depth later
4133 range: item_point_range,
4134 source_range_for_text: source_range_for_text.to_point(self),
4135 text,
4136 highlight_ranges,
4137 name_ranges,
4138 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4139 annotation_range: None,
4140 })
4141 }
4142
4143 pub fn function_body_fold_ranges<T: ToOffset>(
4144 &self,
4145 within: Range<T>,
4146 ) -> impl Iterator<Item = Range<usize>> + '_ {
4147 self.text_object_ranges(within, TreeSitterOptions::default())
4148 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4149 }
4150
4151 /// For each grammar in the language, runs the provided
4152 /// [`tree_sitter::Query`] against the given range.
4153 pub fn matches(
4154 &self,
4155 range: Range<usize>,
4156 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4157 ) -> SyntaxMapMatches<'_> {
4158 self.syntax.matches(range, self, query)
4159 }
4160
    /// Finds all [`RowChunks`] applicable to the given range, then returns every bracket pair that intersects those chunks.
    /// Because matching is chunk-based, this may return more bracket pairs than the range itself contains.
    ///
    /// Chunks already present in `known_chunks` are skipped, as long as the buffer version has not changed since they were computed.
    /// The resulting bracket match collections are not ordered.
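    ///
    /// A hypothetical usage sketch (the byte range is an assumption, and no cached chunks are passed):
    ///
    /// ```ignore
    /// let brackets_by_rows = snapshot.fetch_bracket_ranges(0..256, None);
    /// for (row_range, matches) in &brackets_by_rows {
    ///     println!("rows {row_range:?}: {} bracket pairs", matches.len());
    /// }
    /// ```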
4166 pub fn fetch_bracket_ranges(
4167 &self,
4168 range: Range<usize>,
4169 known_chunks: Option<(&Global, &HashSet<Range<BufferRow>>)>,
4170 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch>> {
4171 let mut tree_sitter_data = self.latest_tree_sitter_data().clone();
4172
4173 let known_chunks = match known_chunks {
4174 Some((known_version, known_chunks)) => {
4175 if !tree_sitter_data
4176 .chunks
4177 .version()
4178 .changed_since(known_version)
4179 {
4180 known_chunks.clone()
4181 } else {
4182 HashSet::default()
4183 }
4184 }
4185 None => HashSet::default(),
4186 };
4187
4188 let mut new_bracket_matches = HashMap::default();
4189 let mut all_bracket_matches = HashMap::default();
4190
4191 for chunk in tree_sitter_data
4192 .chunks
4193 .applicable_chunks(&[self.anchor_before(range.start)..self.anchor_after(range.end)])
4194 {
4195 if known_chunks.contains(&chunk.row_range()) {
4196 continue;
4197 }
4198 let Some(chunk_range) = tree_sitter_data.chunks.chunk_range(chunk) else {
4199 continue;
4200 };
4201 let chunk_range = chunk_range.to_offset(&tree_sitter_data.chunks.snapshot);
4202
4203 let bracket_matches = match tree_sitter_data.brackets_by_chunks[chunk.id].take() {
4204 Some(cached_brackets) => cached_brackets,
4205 None => {
4206 // Sequential IDs are needed to determine the color of the bracket pair.
4207 let mut next_id = match tree_sitter_data.chunks.previous_chunk(chunk) {
4208 Some(previous_chunk) => tree_sitter_data.brackets_by_chunks
4209 [previous_chunk.id]
4210 .as_ref()
4211 .and_then(|previous_brackets| previous_brackets.last())
4212 // Try to continue previous sequence of IDs.
4213 .and_then(|bracket| bracket.id.map(|id| id + 1))
                            // If that's not possible, start another sequence, picking a starting ID far enough away to avoid overlaps.
                            //
                            // This is guaranteed to introduce gaps between chunks' bracket IDs,
                            // but at worst it skips `mod(accents_number)` colors between chunks.
4218 .unwrap_or_else(|| {
4219 (usize::MAX / tree_sitter_data.chunks.len()) * chunk.id + 1
4220 }),
4221 None => 0,
4222 };
4223 let mut matches =
4224 self.syntax
4225 .matches(chunk_range.clone(), &self.text, |grammar| {
4226 grammar.brackets_config.as_ref().map(|c| &c.query)
4227 });
4228 let configs = matches
4229 .grammars()
4230 .iter()
4231 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4232 .collect::<Vec<_>>();
4233
4234 let chunk_range = chunk_range.clone();
4235 let new_matches = iter::from_fn(move || {
4236 while let Some(mat) = matches.peek() {
4237 let mut open = None;
4238 let mut close = None;
4239 let config = configs[mat.grammar_index];
4240 let pattern = &config.patterns[mat.pattern_index];
4241 for capture in mat.captures {
4242 if capture.index == config.open_capture_ix {
4243 open = Some(capture.node.byte_range());
4244 } else if capture.index == config.close_capture_ix {
4245 close = Some(capture.node.byte_range());
4246 }
4247 }
4248
4249 matches.advance();
4250
4251 let Some((open_range, close_range)) = open.zip(close) else {
4252 continue;
4253 };
4254
4255 let bracket_range = open_range.start..=close_range.end;
4256 if !bracket_range.overlaps(&chunk_range) {
4257 continue;
4258 }
4259
4260 return Some(BracketMatch {
4261 open_range,
4262 close_range,
4263 newline_only: pattern.newline_only,
4264 id: pattern
4265 .rainbow_exclude
4266 .not()
4267 .then(|| post_inc(&mut next_id)),
4268 });
4269 }
4270 None
4271 })
4272 .collect::<Vec<_>>();
4273
4274 new_bracket_matches.insert(chunk.id, new_matches.clone());
4275 new_matches
4276 }
4277 };
4278 all_bracket_matches.insert(chunk.row_range(), bracket_matches);
4279 }
4280
4281 let mut latest_tree_sitter_data = self.latest_tree_sitter_data();
4282 if latest_tree_sitter_data.chunks.version() == &self.version {
4283 for (chunk_id, new_matches) in new_bracket_matches {
4284 let old_chunks = &mut latest_tree_sitter_data.brackets_by_chunks[chunk_id];
4285 if old_chunks.is_none() {
4286 *old_chunks = Some(new_matches);
4287 }
4288 }
4289 }
4290
4291 all_bracket_matches
4292 }
4293
4294 fn latest_tree_sitter_data(&self) -> MutexGuard<'_, RawMutex, TreeSitterData> {
4295 let mut tree_sitter_data = self.tree_sitter_data.lock();
4296 if self
4297 .version
4298 .changed_since(tree_sitter_data.chunks.version())
4299 {
4300 *tree_sitter_data = TreeSitterData::new(self.text.clone());
4301 }
4302 tree_sitter_data
4303 }
4304
4305 pub fn all_bracket_ranges(&self, range: Range<usize>) -> impl Iterator<Item = BracketMatch> {
4306 self.fetch_bracket_ranges(range.clone(), None)
4307 .into_values()
4308 .flatten()
4309 .filter(move |bracket_match| {
4310 let bracket_range = bracket_match.open_range.start..=bracket_match.close_range.end;
4311 bracket_range.overlaps(&range)
4312 })
4313 }
4314
4315 /// Returns bracket range pairs overlapping or adjacent to `range`
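    ///
    /// A hypothetical usage sketch (the cursor offset is an assumption):
    ///
    /// ```ignore
    /// // Collect the bracket pairs around an empty range at a cursor position.
    /// let pairs: Vec<BracketMatch> = snapshot.bracket_ranges(10..10).collect();
    /// ```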
4316 pub fn bracket_ranges<T: ToOffset>(
4317 &self,
4318 range: Range<T>,
4319 ) -> impl Iterator<Item = BracketMatch> + '_ {
4320 // Find bracket pairs that *inclusively* contain the given range.
4321 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4322 self.all_bracket_ranges(range)
4323 .filter(|pair| !pair.newline_only)
4324 }
4325
4326 pub fn debug_variables_query<T: ToOffset>(
4327 &self,
4328 range: Range<T>,
4329 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4330 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4331
4332 let mut matches = self.syntax.matches_with_options(
4333 range.clone(),
4334 &self.text,
4335 TreeSitterOptions::default(),
4336 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4337 );
4338
4339 let configs = matches
4340 .grammars()
4341 .iter()
4342 .map(|grammar| grammar.debug_variables_config.as_ref())
4343 .collect::<Vec<_>>();
4344
4345 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4346
4347 iter::from_fn(move || {
4348 loop {
4349 while let Some(capture) = captures.pop() {
4350 if capture.0.overlaps(&range) {
4351 return Some(capture);
4352 }
4353 }
4354
4355 let mat = matches.peek()?;
4356
4357 let Some(config) = configs[mat.grammar_index].as_ref() else {
4358 matches.advance();
4359 continue;
4360 };
4361
4362 for capture in mat.captures {
4363 let Some(ix) = config
4364 .objects_by_capture_ix
4365 .binary_search_by_key(&capture.index, |e| e.0)
4366 .ok()
4367 else {
4368 continue;
4369 };
4370 let text_object = config.objects_by_capture_ix[ix].1;
4371 let byte_range = capture.node.byte_range();
4372
4373 let mut found = false;
4374 for (range, existing) in captures.iter_mut() {
4375 if existing == &text_object {
4376 range.start = range.start.min(byte_range.start);
4377 range.end = range.end.max(byte_range.end);
4378 found = true;
4379 break;
4380 }
4381 }
4382
4383 if !found {
4384 captures.push((byte_range, text_object));
4385 }
4386 }
4387
4388 matches.advance();
4389 }
4390 })
4391 }
4392
4393 pub fn text_object_ranges<T: ToOffset>(
4394 &self,
4395 range: Range<T>,
4396 options: TreeSitterOptions,
4397 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4398 let range =
4399 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4400
4401 let mut matches =
4402 self.syntax
4403 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4404 grammar.text_object_config.as_ref().map(|c| &c.query)
4405 });
4406
4407 let configs = matches
4408 .grammars()
4409 .iter()
4410 .map(|grammar| grammar.text_object_config.as_ref())
4411 .collect::<Vec<_>>();
4412
4413 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4414
4415 iter::from_fn(move || {
4416 loop {
4417 while let Some(capture) = captures.pop() {
4418 if capture.0.overlaps(&range) {
4419 return Some(capture);
4420 }
4421 }
4422
4423 let mat = matches.peek()?;
4424
4425 let Some(config) = configs[mat.grammar_index].as_ref() else {
4426 matches.advance();
4427 continue;
4428 };
4429
4430 for capture in mat.captures {
4431 let Some(ix) = config
4432 .text_objects_by_capture_ix
4433 .binary_search_by_key(&capture.index, |e| e.0)
4434 .ok()
4435 else {
4436 continue;
4437 };
4438 let text_object = config.text_objects_by_capture_ix[ix].1;
4439 let byte_range = capture.node.byte_range();
4440
4441 let mut found = false;
4442 for (range, existing) in captures.iter_mut() {
4443 if existing == &text_object {
4444 range.start = range.start.min(byte_range.start);
4445 range.end = range.end.max(byte_range.end);
4446 found = true;
4447 break;
4448 }
4449 }
4450
4451 if !found {
4452 captures.push((byte_range, text_object));
4453 }
4454 }
4455
4456 matches.advance();
4457 }
4458 })
4459 }
4460
4461 /// Returns enclosing bracket ranges containing the given range
4462 pub fn enclosing_bracket_ranges<T: ToOffset>(
4463 &self,
4464 range: Range<T>,
4465 ) -> impl Iterator<Item = BracketMatch> + '_ {
4466 let range = range.start.to_offset(self)..range.end.to_offset(self);
4467
4468 self.bracket_ranges(range.clone()).filter(move |pair| {
4469 pair.open_range.start <= range.start && pair.close_range.end >= range.end
4470 })
4471 }
4472
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain it.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
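    ///
    /// A hypothetical usage sketch (the range and filter closure are assumptions):
    ///
    /// ```ignore
    /// let innermost = snapshot.innermost_enclosing_bracket_ranges(
    ///     15..18,
    ///     Some(&|open, close| close.start > open.end), // skip degenerate pairs
    /// );
    /// ```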
4476 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4477 &self,
4478 range: Range<T>,
4479 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4480 ) -> Option<(Range<usize>, Range<usize>)> {
4481 let range = range.start.to_offset(self)..range.end.to_offset(self);
4482
4483 // Get the ranges of the innermost pair of brackets.
4484 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4485
4486 for pair in self.enclosing_bracket_ranges(range) {
4487 if let Some(range_filter) = range_filter
4488 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4489 {
4490 continue;
4491 }
4492
4493 let len = pair.close_range.end - pair.open_range.start;
4494
4495 if let Some((existing_open, existing_close)) = &result {
4496 let existing_len = existing_close.end - existing_open.start;
4497 if len > existing_len {
4498 continue;
4499 }
4500 }
4501
4502 result = Some((pair.open_range, pair.close_range));
4503 }
4504
4505 result
4506 }
4507
4508 /// Returns anchor ranges for any matches of the redaction query.
4509 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4510 /// will be run on the relevant section of the buffer.
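    ///
    /// A hypothetical usage sketch that scans the whole buffer:
    ///
    /// ```ignore
    /// let redactions: Vec<_> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```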
4511 pub fn redacted_ranges<T: ToOffset>(
4512 &self,
4513 range: Range<T>,
4514 ) -> impl Iterator<Item = Range<usize>> + '_ {
4515 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4516 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4517 grammar
4518 .redactions_config
4519 .as_ref()
4520 .map(|config| &config.query)
4521 });
4522
4523 let configs = syntax_matches
4524 .grammars()
4525 .iter()
4526 .map(|grammar| grammar.redactions_config.as_ref())
4527 .collect::<Vec<_>>();
4528
4529 iter::from_fn(move || {
4530 let redacted_range = syntax_matches
4531 .peek()
4532 .and_then(|mat| {
4533 configs[mat.grammar_index].and_then(|config| {
4534 mat.captures
4535 .iter()
4536 .find(|capture| capture.index == config.redaction_capture_ix)
4537 })
4538 })
4539 .map(|mat| mat.node.byte_range());
4540 syntax_matches.advance();
4541 redacted_range
4542 })
4543 }
4544
4545 pub fn injections_intersecting_range<T: ToOffset>(
4546 &self,
4547 range: Range<T>,
4548 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4549 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4550
4551 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4552 grammar
4553 .injection_config
4554 .as_ref()
4555 .map(|config| &config.query)
4556 });
4557
4558 let configs = syntax_matches
4559 .grammars()
4560 .iter()
4561 .map(|grammar| grammar.injection_config.as_ref())
4562 .collect::<Vec<_>>();
4563
4564 iter::from_fn(move || {
4565 let ranges = syntax_matches.peek().and_then(|mat| {
4566 let config = &configs[mat.grammar_index]?;
4567 let content_capture_range = mat.captures.iter().find_map(|capture| {
4568 if capture.index == config.content_capture_ix {
4569 Some(capture.node.byte_range())
4570 } else {
4571 None
4572 }
4573 })?;
4574 let language = self.language_at(content_capture_range.start)?;
4575 Some((content_capture_range, language))
4576 });
4577 syntax_matches.advance();
4578 ranges
4579 })
4580 }
4581
4582 pub fn runnable_ranges(
4583 &self,
4584 offset_range: Range<usize>,
4585 ) -> impl Iterator<Item = RunnableRange> + '_ {
4586 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4587 grammar.runnable_config.as_ref().map(|config| &config.query)
4588 });
4589
4590 let test_configs = syntax_matches
4591 .grammars()
4592 .iter()
4593 .map(|grammar| grammar.runnable_config.as_ref())
4594 .collect::<Vec<_>>();
4595
4596 iter::from_fn(move || {
4597 loop {
4598 let mat = syntax_matches.peek()?;
4599
4600 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4601 let mut run_range = None;
4602 let full_range = mat.captures.iter().fold(
4603 Range {
4604 start: usize::MAX,
4605 end: 0,
4606 },
4607 |mut acc, next| {
4608 let byte_range = next.node.byte_range();
4609 if acc.start > byte_range.start {
4610 acc.start = byte_range.start;
4611 }
4612 if acc.end < byte_range.end {
4613 acc.end = byte_range.end;
4614 }
4615 acc
4616 },
4617 );
4618 if full_range.start > full_range.end {
4619 // We did not find a full spanning range of this match.
4620 return None;
4621 }
4622 let extra_captures: SmallVec<[_; 1]> =
4623 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4624 test_configs
4625 .extra_captures
4626 .get(capture.index as usize)
4627 .cloned()
4628 .and_then(|tag_name| match tag_name {
4629 RunnableCapture::Named(name) => {
4630 Some((capture.node.byte_range(), name))
4631 }
4632 RunnableCapture::Run => {
4633 let _ = run_range.insert(capture.node.byte_range());
4634 None
4635 }
4636 })
4637 }));
4638 let run_range = run_range?;
4639 let tags = test_configs
4640 .query
4641 .property_settings(mat.pattern_index)
4642 .iter()
4643 .filter_map(|property| {
4644 if *property.key == *"tag" {
4645 property
4646 .value
4647 .as_ref()
4648 .map(|value| RunnableTag(value.to_string().into()))
4649 } else {
4650 None
4651 }
4652 })
4653 .collect();
4654 let extra_captures = extra_captures
4655 .into_iter()
4656 .map(|(range, name)| {
4657 (
4658 name.to_string(),
4659 self.text_for_range(range).collect::<String>(),
4660 )
4661 })
4662 .collect();
4663 // All tags should have the same range.
4664 Some(RunnableRange {
4665 run_range,
4666 full_range,
4667 runnable: Runnable {
4668 tags,
4669 language: mat.language,
4670 buffer: self.remote_id(),
4671 },
4672 extra_captures,
4673 buffer_id: self.remote_id(),
4674 })
4675 });
4676
4677 syntax_matches.advance();
4678 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns None. However, a match whose
                    // captures lack a run marker shouldn't end this iterator, so in that case we simply
                    // loop around to the next match.
4681 return test_range;
4682 }
4683 }
4684 })
4685 }
4686
4687 /// Returns selections for remote peers intersecting the given range.
4688 #[allow(clippy::type_complexity)]
4689 pub fn selections_in_range(
4690 &self,
4691 range: Range<Anchor>,
4692 include_local: bool,
4693 ) -> impl Iterator<
4694 Item = (
4695 ReplicaId,
4696 bool,
4697 CursorShape,
4698 impl Iterator<Item = &Selection<Anchor>> + '_,
4699 ),
4700 > + '_ {
4701 self.remote_selections
4702 .iter()
4703 .filter(move |(replica_id, set)| {
4704 (include_local || **replica_id != self.text.replica_id())
4705 && !set.selections.is_empty()
4706 })
4707 .map(move |(replica_id, set)| {
4708 let start_ix = match set.selections.binary_search_by(|probe| {
4709 probe.end.cmp(&range.start, self).then(Ordering::Greater)
4710 }) {
4711 Ok(ix) | Err(ix) => ix,
4712 };
4713 let end_ix = match set.selections.binary_search_by(|probe| {
4714 probe.start.cmp(&range.end, self).then(Ordering::Less)
4715 }) {
4716 Ok(ix) | Err(ix) => ix,
4717 };
4718
4719 (
4720 *replica_id,
4721 set.line_mode,
4722 set.cursor_shape,
4723 set.selections[start_ix..end_ix].iter(),
4724 )
4725 })
4726 }
4727
    /// Returns whether the buffer contains any diagnostics.
4729 pub fn has_diagnostics(&self) -> bool {
4730 !self.diagnostics.is_empty()
4731 }
4732
4733 /// Returns all the diagnostics intersecting the given range.
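    ///
    /// A hypothetical usage sketch that resolves the diagnostic ranges to byte offsets:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, usize>(0..snapshot.len(), false) {
    ///     println!("{:?}: {}", entry.range, entry.diagnostic.message);
    /// }
    /// ```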
4734 pub fn diagnostics_in_range<'a, T, O>(
4735 &'a self,
4736 search_range: Range<T>,
4737 reversed: bool,
4738 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
4739 where
4740 T: 'a + Clone + ToOffset,
4741 O: 'a + FromAnchor,
4742 {
4743 let mut iterators: Vec<_> = self
4744 .diagnostics
4745 .iter()
4746 .map(|(_, collection)| {
4747 collection
4748 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
4749 .peekable()
4750 })
4751 .collect();
4752
4753 std::iter::from_fn(move || {
4754 let (next_ix, _) = iterators
4755 .iter_mut()
4756 .enumerate()
4757 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
4758 .min_by(|(_, a), (_, b)| {
4759 let cmp = a
4760 .range
4761 .start
4762 .cmp(&b.range.start, self)
4763 // when range is equal, sort by diagnostic severity
4764 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
4765 // and stabilize order with group_id
4766 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
4767 if reversed { cmp.reverse() } else { cmp }
4768 })?;
4769 iterators[next_ix]
4770 .next()
4771 .map(
4772 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
4773 diagnostic,
4774 range: FromAnchor::from_anchor(&range.start, self)
4775 ..FromAnchor::from_anchor(&range.end, self),
4776 },
4777 )
4778 })
4779 }
4780
4781 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
4782 /// should be used instead.
4783 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
4784 &self.diagnostics
4785 }
4786
4787 /// Returns all the diagnostic groups associated with the given
4788 /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
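    ///
    /// A hypothetical usage sketch that collects groups from every language server:
    ///
    /// ```ignore
    /// let groups = snapshot.diagnostic_groups(None);
    /// println!("{} diagnostic groups", groups.len());
    /// ```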
4790 pub fn diagnostic_groups(
4791 &self,
4792 language_server_id: Option<LanguageServerId>,
4793 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
4794 let mut groups = Vec::new();
4795
4796 if let Some(language_server_id) = language_server_id {
4797 if let Ok(ix) = self
4798 .diagnostics
4799 .binary_search_by_key(&language_server_id, |e| e.0)
4800 {
4801 self.diagnostics[ix]
4802 .1
4803 .groups(language_server_id, &mut groups, self);
4804 }
4805 } else {
4806 for (language_server_id, diagnostics) in self.diagnostics.iter() {
4807 diagnostics.groups(*language_server_id, &mut groups, self);
4808 }
4809 }
4810
4811 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
4812 let a_start = &group_a.entries[group_a.primary_ix].range.start;
4813 let b_start = &group_b.entries[group_b.primary_ix].range.start;
4814 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
4815 });
4816
4817 groups
4818 }
4819
4820 /// Returns an iterator over the diagnostics for the given group.
4821 pub fn diagnostic_group<O>(
4822 &self,
4823 group_id: usize,
4824 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
4825 where
4826 O: FromAnchor + 'static,
4827 {
4828 self.diagnostics
4829 .iter()
4830 .flat_map(move |(_, set)| set.group(group_id, self))
4831 }
4832
4833 /// An integer version number that accounts for all updates besides
4834 /// the buffer's text itself (which is versioned via a version vector).
4835 pub fn non_text_state_update_count(&self) -> usize {
4836 self.non_text_state_update_count
4837 }
4838
4839 /// An integer version that changes when the buffer's syntax changes.
4840 pub fn syntax_update_count(&self) -> usize {
4841 self.syntax.update_count()
4842 }
4843
    /// Returns a snapshot of the underlying file.
4845 pub fn file(&self) -> Option<&Arc<dyn File>> {
4846 self.file.as_ref()
4847 }
4848
4849 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
4850 if let Some(file) = self.file() {
4851 if file.path().file_name().is_none() || include_root {
4852 Some(file.full_path(cx).to_string_lossy().into_owned())
4853 } else {
4854 Some(file.path().display(file.path_style(cx)).to_string())
4855 }
4856 } else {
4857 None
4858 }
4859 }
4860
4861 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
4862 let query_str = query.fuzzy_contents;
4863 if query_str.is_some_and(|query| query.is_empty()) {
4864 return BTreeMap::default();
4865 }
4866
4867 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
4868 language,
4869 override_id: None,
4870 }));
4871
4872 let mut query_ix = 0;
4873 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
4874 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
4875
4876 let mut words = BTreeMap::default();
4877 let mut current_word_start_ix = None;
4878 let mut chunk_ix = query.range.start;
4879 for chunk in self.chunks(query.range, false) {
4880 for (i, c) in chunk.text.char_indices() {
4881 let ix = chunk_ix + i;
4882 if classifier.is_word(c) {
4883 if current_word_start_ix.is_none() {
4884 current_word_start_ix = Some(ix);
4885 }
4886
4887 if let Some(query_chars) = &query_chars
4888 && query_ix < query_len
4889 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
4890 {
4891 query_ix += 1;
4892 }
4893 continue;
4894 } else if let Some(word_start) = current_word_start_ix.take()
4895 && query_ix == query_len
4896 {
4897 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
4898 let mut word_text = self.text_for_range(word_start..ix).peekable();
4899 let first_char = word_text
4900 .peek()
4901 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, skip empty words and "words" that start with a digit.
4903 if !query.skip_digits
4904 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
4905 {
4906 words.insert(word_text.collect(), word_range);
4907 }
4908 }
4909 query_ix = 0;
4910 }
4911 chunk_ix += chunk.text.len();
4912 }
4913
4914 words
4915 }
4916}
4917
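/// Parameters for [`BufferSnapshot::words_in_range`].
///
/// A hypothetical query sketch (the fuzzy string and range are assumptions):
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```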
4918pub struct WordsQuery<'a> {
    /// Only returns words that contain all characters of the fuzzy string, in order (case-insensitive).
4920 pub fuzzy_contents: Option<&'a str>,
4921 /// Skips words that start with a digit.
4922 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
4924 pub range: Range<usize>,
4925}
4926
4927fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
4928 indent_size_for_text(text.chars_at(Point::new(row, 0)))
4929}
4930
4931fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
4932 let mut result = IndentSize::spaces(0);
4933 for c in text {
4934 let kind = match c {
4935 ' ' => IndentKind::Space,
4936 '\t' => IndentKind::Tab,
4937 _ => break,
4938 };
4939 if result.len == 0 {
4940 result.kind = kind;
4941 }
4942 result.len += 1;
4943 }
4944 result
4945}
4946
4947impl Clone for BufferSnapshot {
4948 fn clone(&self) -> Self {
4949 Self {
4950 text: self.text.clone(),
4951 syntax: self.syntax.clone(),
4952 file: self.file.clone(),
4953 remote_selections: self.remote_selections.clone(),
4954 diagnostics: self.diagnostics.clone(),
4955 language: self.language.clone(),
4956 tree_sitter_data: self.tree_sitter_data.clone(),
4957 non_text_state_update_count: self.non_text_state_update_count,
4958 }
4959 }
4960}
4961
4962impl Deref for BufferSnapshot {
4963 type Target = text::BufferSnapshot;
4964
4965 fn deref(&self) -> &Self::Target {
4966 &self.text
4967 }
4968}
4969
4970unsafe impl Send for BufferChunks<'_> {}
4971
4972impl<'a> BufferChunks<'a> {
4973 pub(crate) fn new(
4974 text: &'a Rope,
4975 range: Range<usize>,
4976 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
4977 diagnostics: bool,
4978 buffer_snapshot: Option<&'a BufferSnapshot>,
4979 ) -> Self {
4980 let mut highlights = None;
4981 if let Some((captures, highlight_maps)) = syntax {
4982 highlights = Some(BufferChunkHighlights {
4983 captures,
4984 next_capture: None,
4985 stack: Default::default(),
4986 highlight_maps,
4987 })
4988 }
4989
4990 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
4991 let chunks = text.chunks_in_range(range.clone());
4992
4993 let mut this = BufferChunks {
4994 range,
4995 buffer_snapshot,
4996 chunks,
4997 diagnostic_endpoints,
4998 error_depth: 0,
4999 warning_depth: 0,
5000 information_depth: 0,
5001 hint_depth: 0,
5002 unnecessary_depth: 0,
5003 underline: true,
5004 highlights,
5005 };
5006 this.initialize_diagnostic_endpoints();
5007 this
5008 }
5009
    /// Seeks to the given byte range in the buffer.
5011 pub fn seek(&mut self, range: Range<usize>) {
5012 let old_range = std::mem::replace(&mut self.range, range.clone());
5013 self.chunks.set_range(self.range.clone());
5014 if let Some(highlights) = self.highlights.as_mut() {
5015 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5016 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5017 highlights
5018 .stack
5019 .retain(|(end_offset, _)| *end_offset > range.start);
5020 if let Some(capture) = &highlights.next_capture
5021 && range.start >= capture.node.start_byte()
5022 {
5023 let next_capture_end = capture.node.end_byte();
5024 if range.start < next_capture_end {
5025 highlights.stack.push((
5026 next_capture_end,
5027 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5028 ));
5029 }
5030 highlights.next_capture.take();
5031 }
5032 } else if let Some(snapshot) = self.buffer_snapshot {
5033 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5034 *highlights = BufferChunkHighlights {
5035 captures,
5036 next_capture: None,
5037 stack: Default::default(),
5038 highlight_maps,
5039 };
5040 } else {
5041 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5042 // Seeking such BufferChunks is not supported.
5043 debug_assert!(
5044 false,
5045 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5046 );
5047 }
5048
5049 highlights.captures.set_byte_range(self.range.clone());
5050 self.initialize_diagnostic_endpoints();
5051 }
5052 }
5053
5054 fn initialize_diagnostic_endpoints(&mut self) {
5055 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5056 && let Some(buffer) = self.buffer_snapshot
5057 {
5058 let mut diagnostic_endpoints = Vec::new();
5059 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5060 diagnostic_endpoints.push(DiagnosticEndpoint {
5061 offset: entry.range.start,
5062 is_start: true,
5063 severity: entry.diagnostic.severity,
5064 is_unnecessary: entry.diagnostic.is_unnecessary,
5065 underline: entry.diagnostic.underline,
5066 });
5067 diagnostic_endpoints.push(DiagnosticEndpoint {
5068 offset: entry.range.end,
5069 is_start: false,
5070 severity: entry.diagnostic.severity,
5071 is_unnecessary: entry.diagnostic.is_unnecessary,
5072 underline: entry.diagnostic.underline,
5073 });
5074 }
5075 diagnostic_endpoints
5076 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5077 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5078 self.hint_depth = 0;
5079 self.error_depth = 0;
5080 self.warning_depth = 0;
5081 self.information_depth = 0;
5082 }
5083 }
5084
5085 /// The current byte offset in the buffer.
5086 pub fn offset(&self) -> usize {
5087 self.range.start
5088 }
5089
5090 pub fn range(&self) -> Range<usize> {
5091 self.range.clone()
5092 }
5093
5094 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5095 let depth = match endpoint.severity {
5096 DiagnosticSeverity::ERROR => &mut self.error_depth,
5097 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5098 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5099 DiagnosticSeverity::HINT => &mut self.hint_depth,
5100 _ => return,
5101 };
5102 if endpoint.is_start {
5103 *depth += 1;
5104 } else {
5105 *depth -= 1;
5106 }
5107
5108 if endpoint.is_unnecessary {
5109 if endpoint.is_start {
5110 self.unnecessary_depth += 1;
5111 } else {
5112 self.unnecessary_depth -= 1;
5113 }
5114 }
5115 }
5116
5117 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5118 if self.error_depth > 0 {
5119 Some(DiagnosticSeverity::ERROR)
5120 } else if self.warning_depth > 0 {
5121 Some(DiagnosticSeverity::WARNING)
5122 } else if self.information_depth > 0 {
5123 Some(DiagnosticSeverity::INFORMATION)
5124 } else if self.hint_depth > 0 {
5125 Some(DiagnosticSeverity::HINT)
5126 } else {
5127 None
5128 }
5129 }
5130
5131 fn current_code_is_unnecessary(&self) -> bool {
5132 self.unnecessary_depth > 0
5133 }
5134}
5135
5136impl<'a> Iterator for BufferChunks<'a> {
5137 type Item = Chunk<'a>;
5138
5139 fn next(&mut self) -> Option<Self::Item> {
5140 let mut next_capture_start = usize::MAX;
5141 let mut next_diagnostic_endpoint = usize::MAX;
5142
5143 if let Some(highlights) = self.highlights.as_mut() {
5144 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5145 if *parent_capture_end <= self.range.start {
5146 highlights.stack.pop();
5147 } else {
5148 break;
5149 }
5150 }
5151
5152 if highlights.next_capture.is_none() {
5153 highlights.next_capture = highlights.captures.next();
5154 }
5155
5156 while let Some(capture) = highlights.next_capture.as_ref() {
5157 if self.range.start < capture.node.start_byte() {
5158 next_capture_start = capture.node.start_byte();
5159 break;
5160 } else {
5161 let highlight_id =
5162 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5163 highlights
5164 .stack
5165 .push((capture.node.end_byte(), highlight_id));
5166 highlights.next_capture = highlights.captures.next();
5167 }
5168 }
5169 }
5170
5171 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5172 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5173 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5174 if endpoint.offset <= self.range.start {
5175 self.update_diagnostic_depths(endpoint);
5176 diagnostic_endpoints.next();
5177 self.underline = endpoint.underline;
5178 } else {
5179 next_diagnostic_endpoint = endpoint.offset;
5180 break;
5181 }
5182 }
5183 }
5184 self.diagnostic_endpoints = diagnostic_endpoints;
5185
5186 if let Some(ChunkBitmaps {
5187 text: chunk,
5188 chars: chars_map,
5189 tabs,
5190 }) = self.chunks.peek_with_bitmaps()
5191 {
5192 let chunk_start = self.range.start;
5193 let mut chunk_end = (self.chunks.offset() + chunk.len())
5194 .min(next_capture_start)
5195 .min(next_diagnostic_endpoint);
5196 let mut highlight_id = None;
5197 if let Some(highlights) = self.highlights.as_ref()
5198 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5199 {
5200 chunk_end = chunk_end.min(*parent_capture_end);
5201 highlight_id = Some(*parent_highlight_id);
5202 }
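            // Trim the chunk text and its per-byte tab/char bitmaps down to the
            // sub-slice being emitted for this `Chunk`.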
5203 let bit_start = chunk_start - self.chunks.offset();
5204 let bit_end = chunk_end - self.chunks.offset();
5205
5206 let slice = &chunk[bit_start..bit_end];
5207
5208 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5209 let tabs = (tabs >> bit_start) & mask;
5210 let chars = (chars_map >> bit_start) & mask;
5211
5212 self.range.start = chunk_end;
5213 if self.range.start == self.chunks.offset() + chunk.len() {
5214 self.chunks.next().unwrap();
5215 }
5216
5217 Some(Chunk {
5218 text: slice,
5219 syntax_highlight_id: highlight_id,
5220 underline: self.underline,
5221 diagnostic_severity: self.current_diagnostic_severity(),
5222 is_unnecessary: self.current_code_is_unnecessary(),
5223 tabs,
5224 chars,
5225 ..Chunk::default()
5226 })
5227 } else {
5228 None
5229 }
5230 }
5231}
5232
5233impl operation_queue::Operation for Operation {
5234 fn lamport_timestamp(&self) -> clock::Lamport {
5235 match self {
5236 Operation::Buffer(_) => {
5237 unreachable!("buffer operations should never be deferred at this layer")
5238 }
5239 Operation::UpdateDiagnostics {
5240 lamport_timestamp, ..
5241 }
5242 | Operation::UpdateSelections {
5243 lamport_timestamp, ..
5244 }
5245 | Operation::UpdateCompletionTriggers {
5246 lamport_timestamp, ..
5247 }
5248 | Operation::UpdateLineEnding {
5249 lamport_timestamp, ..
5250 } => *lamport_timestamp,
5251 }
5252 }
5253}
5254
5255impl Default for Diagnostic {
5256 fn default() -> Self {
5257 Self {
5258 source: Default::default(),
5259 source_kind: DiagnosticSourceKind::Other,
5260 code: None,
5261 code_description: None,
5262 severity: DiagnosticSeverity::ERROR,
5263 message: Default::default(),
5264 markdown: None,
5265 group_id: 0,
5266 is_primary: false,
5267 is_disk_based: false,
5268 is_unnecessary: false,
5269 underline: true,
5270 data: None,
5271 }
5272 }
5273}
5274
5275impl IndentSize {
5276 /// Returns an [`IndentSize`] representing the given spaces.
5277 pub fn spaces(len: u32) -> Self {
5278 Self {
5279 len,
5280 kind: IndentKind::Space,
5281 }
5282 }
5283
5284 /// Returns an [`IndentSize`] representing a tab.
5285 pub fn tab() -> Self {
5286 Self {
5287 len: 1,
5288 kind: IndentKind::Tab,
5289 }
5290 }
5291
5292 /// An iterator over the characters represented by this [`IndentSize`].
5293 pub fn chars(&self) -> impl Iterator<Item = char> {
5294 iter::repeat(self.char()).take(self.len as usize)
5295 }
5296
5297 /// The character representation of this [`IndentSize`].
5298 pub fn char(&self) -> char {
5299 match self.kind {
5300 IndentKind::Space => ' ',
5301 IndentKind::Tab => '\t',
5302 }
5303 }
5304
5305 /// Consumes the current [`IndentSize`] and returns a new one that has
5306 /// been shrunk or enlarged by the given size along the given direction.
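    ///
    /// A minimal sketch of growing a four-space indent by one more level:
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// // `indent` now represents eight spaces.
    /// ```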
5307 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5308 match direction {
5309 Ordering::Less => {
5310 if self.kind == size.kind && self.len >= size.len {
5311 self.len -= size.len;
5312 }
5313 }
5314 Ordering::Equal => {}
5315 Ordering::Greater => {
5316 if self.len == 0 {
5317 self = size;
5318 } else if self.kind == size.kind {
5319 self.len += size.len;
5320 }
5321 }
5322 }
5323 self
5324 }
5325
5326 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5327 match self.kind {
5328 IndentKind::Space => self.len as usize,
5329 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5330 }
5331 }
5332}
5333
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

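/// Groups a sequence of row numbers into contiguous half-open ranges,
/// starting a new range whenever a value does not directly follow the
/// previous one or the current range has reached `max_len`. A minimal
/// illustration (not compiled as a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
///
/// // `max_len` caps how long each range may grow.
/// let capped: Vec<_> = contiguous_ranges(0..5, 2).collect();
/// assert_eq!(capped, vec![0..2, 2..4, 4..5]);
/// ```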
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

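/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking a language's extra word characters into account. A
/// minimal illustration (not compiled as a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind(' '), CharKind::Whitespace);
/// assert_eq!(classifier.kind('.'), CharKind::Punctuation);
///
/// // With `ignore_punctuation`, punctuation is treated as part of a word.
/// let classifier = classifier.ignore_punctuation(true);
/// assert_eq!(classifier.kind('.'), CharKind::Word);
/// ```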
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    /// Returns the [`CharKind`] of `c`, treating punctuation as word
    /// characters when `ignore_punctuation` is true.
    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    /// Returns the [`CharKind`] of `c`, honoring this classifier's
    /// `ignore_punctuation` setting.
    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Finds all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
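///
/// A minimal illustration (not compiled as a doctest):
///
/// ```ignore
/// let rope = Rope::from("fn main() {   \n    let x = 1;\t\n}\n");
/// // One range for the trailing spaces on the first line, one for the
/// // trailing tab on the second.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![11..14, 29..30]);
/// ```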
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}