1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, ModelineSettings, Outline, OutlineConfig, PLAIN_TEXT,
5 RunnableCapture, RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{AutoIndentMode, LanguageSettings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff_with_offsets,
17};
18pub use crate::{
19 Grammar, HighlightId, HighlightMap, Language, LanguageRegistry, diagnostic_set::DiagnosticSet,
20 proto,
21};
22
23use anyhow::{Context as _, Result};
24use clock::Lamport;
25pub use clock::ReplicaId;
26use collections::{HashMap, HashSet};
27use encoding_rs::Encoding;
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TextStyle,
33};
34
35use lsp::LanguageServerId;
36use parking_lot::Mutex;
37use settings::WorktreeId;
38use smallvec::SmallVec;
39use smol::future::yield_now;
40use std::{
41 any::Any,
42 borrow::Cow,
43 cell::Cell,
44 cmp::{self, Ordering, Reverse},
45 collections::{BTreeMap, BTreeSet},
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::PathBuf,
52 rc,
53 sync::Arc,
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, maybe, paths::PathStyle, rel_path::RelPath};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
/// Indicate whether a [`Buffer`] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a mutable replica, but toggled to be only readable.
    /// Unlike [`Capability::ReadOnly`], the underlying replica itself remains writable.
    Read,
    /// The buffer is a read-only replica.
    ReadOnly,
}
86
87impl Capability {
88 /// Returns `true` if the capability is `ReadWrite`.
89 pub fn editable(self) -> bool {
90 matches!(self, Capability::ReadWrite)
91 }
92}
93
94pub type BufferRow = u32;
95
/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    /// The underlying text buffer (a CRDT replica).
    text: TextBuffer,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    // Version vector of the buffer's preview state — NOTE(review): exact
    // semantics not visible in this chunk; confirm against its usages.
    preview_version: clock::Global,
    /// Nesting depth of in-progress transactions.
    transaction_depth: usize,
    /// Dirty state captured when the outermost transaction was started.
    was_dirty_before_starting_transaction: Option<bool>,
    /// In-flight task reloading the buffer's contents from disk.
    reload_task: Option<Task<Result<()>>>,
    /// The language assigned to this buffer, if any.
    language: Option<Arc<Language>>,
    /// Pending auto-indent requests, applied asynchronously.
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    /// Channels notified once pending auto-indentation has been applied.
    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
    /// In-flight task computing auto-indentation.
    pending_autoindent: Option<Task<()>>,
    // Presumably how long to parse synchronously before going async — confirm.
    sync_parse_timeout: Option<Duration>,
    /// The buffer's syntax trees.
    syntax_map: Mutex<SyntaxMap>,
    /// In-flight reparse task.
    reparse: Option<Task<()>>,
    /// Watch channel broadcasting whether a parse is in progress.
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    /// Counter bumped on non-text state changes (diagnostics, selections, etc.).
    non_text_state_update_count: usize,
    /// Diagnostics, keyed by the language server that produced them.
    diagnostics: TreeMap<LanguageServerId, DiagnosticSet>,
    /// Most recent selections of each replica, keyed by replica id.
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    /// Lamport timestamp of the most recent diagnostics update.
    diagnostics_timestamp: clock::Lamport,
    /// Characters that trigger autocompletion (union over language servers).
    completion_triggers: BTreeSet<String>,
    /// Completion trigger characters, per language server.
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    /// Lamport timestamp of the most recent completion-triggers update.
    completion_triggers_timestamp: clock::Lamport,
    /// Operations deferred for later application (see [`Buffer::serialize_ops`]).
    deferred_ops: OperationQueue<Operation>,
    /// Whether this replica may edit the buffer.
    capability: Capability,
    /// Whether the buffer's contents conflict with the file on disk.
    has_conflict: bool,
    /// Memoize calls to has_changes_since(saved_version).
    /// The contents of a cell are (self.version, has_changes) at the time of a last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    // Weak change flags — presumably set to `true` when the buffer changes,
    // allowing observers to poll cheaply; confirm against their usages.
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    /// Settings parsed from the file's modeline, if any.
    modeline: Option<Arc<ModelineSettings>>,
    _subscriptions: Vec<gpui::Subscription>,
    /// Cached tree-sitter derived data (see [`TreeSitterData`]).
    tree_sitter_data: Arc<TreeSitterData>,
    /// The character encoding used when loading/saving the file.
    encoding: &'static Encoding,
    /// Whether the file began with a byte-order mark.
    has_bom: bool,
    /// Encoding/BOM state recorded per reload-with-encoding transaction.
    reload_with_encoding_txns: HashMap<TransactionId, (&'static Encoding, bool)>,
}
141
/// Row-chunked cache of tree-sitter derived data (currently bracket matches),
/// rebuilt wholesale when the buffer text changes (see `clear`).
#[derive(Debug)]
pub struct TreeSitterData {
    /// The buffer's rows partitioned into chunks of at most
    /// [`MAX_ROWS_IN_A_CHUNK`] rows each.
    chunks: RowChunks,
    /// Lazily-computed bracket matches, one slot per chunk;
    /// `None` means the chunk has not been computed yet.
    brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
}
147
148const MAX_ROWS_IN_A_CHUNK: u32 = 50;
149
150impl TreeSitterData {
151 fn clear(&mut self, snapshot: &text::BufferSnapshot) {
152 self.chunks = RowChunks::new(&snapshot, MAX_ROWS_IN_A_CHUNK);
153 self.brackets_by_chunks.get_mut().clear();
154 self.brackets_by_chunks
155 .get_mut()
156 .resize(self.chunks.len(), None);
157 }
158
159 fn new(snapshot: &text::BufferSnapshot) -> Self {
160 let chunks = RowChunks::new(&snapshot, MAX_ROWS_IN_A_CHUNK);
161 Self {
162 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
163 chunks,
164 }
165 }
166
167 fn version(&self) -> &clock::Global {
168 self.chunks.version()
169 }
170}
171
/// Whether the buffer's syntax map is currently being updated.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    /// No parse is in progress.
    Idle,
    /// A reparse is in progress.
    Parsing,
}
177
/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    /// The underlying text snapshot.
    pub text: text::BufferSnapshot,
    /// The syntax trees at the time of the snapshot.
    pub(crate) syntax: SyntaxSnapshot,
    /// Cached tree-sitter derived data, shared with the buffer.
    tree_sitter_data: Arc<TreeSitterData>,
    /// Diagnostics, keyed by the language server that produced them.
    diagnostics: TreeMap<LanguageServerId, DiagnosticSet>,
    /// Most recent selections of each replica, keyed by replica id.
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    /// The buffer's language at the time of the snapshot, if any.
    language: Option<Arc<Language>>,
    /// The buffer's file at the time of the snapshot, if any.
    file: Option<Arc<dyn File>>,
    /// Value of the buffer's non-text update counter at snapshot time.
    non_text_state_update_count: usize,
    /// The buffer's edit capability at the time of the snapshot.
    pub capability: Capability,
    /// Modeline settings at the time of the snapshot, if any.
    modeline: Option<Arc<ModelineSettings>>,
}
192
/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation (equivalently the
    /// number of characters, since the indent characters are ASCII).
    pub len: u32,
    /// The kind of whitespace used for indentation.
    pub kind: IndentKind,
}
202
/// A whitespace character that's used for indentation.
/// Defaults to [`IndentKind::Space`].
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}
212
/// The shape of a selection cursor.
/// Defaults to [`CursorShape::Bar`].
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}
226
227impl From<settings::CursorShape> for CursorShape {
228 fn from(shape: settings::CursorShape) -> Self {
229 match shape {
230 settings::CursorShape::Bar => CursorShape::Bar,
231 settings::CursorShape::Block => CursorShape::Block,
232 settings::CursorShape::Underline => CursorShape::Underline,
233 settings::CursorShape::Hollow => CursorShape::Hollow,
234 }
235 }
236}
237
/// A replica's most recent set of selections, as stored per replica id
/// in `remote_selections`.
#[derive(Clone, Debug)]
struct SelectionSet {
    /// Whether the selections are in 'line mode'.
    line_mode: bool,
    /// The cursor shape associated with these selections.
    cursor_shape: CursorShape,
    /// The selections themselves.
    selections: Arc<[Selection<Anchor>]>,
    /// Lamport timestamp of the update that produced this set.
    lamport_timestamp: clock::Lamport,
}
245
/// An operation used to synchronize this buffer with its other replicas.
///
/// Every non-text variant carries a lamport timestamp, which is used to
/// order operations when they are serialized (see `serialize_ops`).
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },

    /// An update to the line ending type of this buffer.
    UpdateLineEnding {
        /// The line ending type.
        line_ending: LineEnding,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}
293
/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        /// The operation to propagate.
        operation: Operation,
        /// Presumably whether the operation originated on this replica
        /// rather than arriving from a remote peer — confirm at emit sites.
        is_local: bool,
    },
    /// The buffer was edited.
    Edited { is_local: bool },
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer is in need of a reload
    ReloadNeeded,
    /// The buffer's language was changed.
    /// The boolean indicates whether this buffer did not have a language before, but does now.
    LanguageChanged(bool),
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
}
325
/// The file associated with a buffer.
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<RelPath>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the path style of this file.
    fn path_style(&self, cx: &App) -> PathStyle;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Return whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;

    /// Whether the file can be opened.
    /// NOTE(review): the default returns `true` only for *non-local* files —
    /// confirm that this inversion is intentional for implementors that rely
    /// on the default.
    fn can_open(&self) -> bool {
        !self.is_local()
    }
}
370
/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present {
        /// Last known modification time on disk.
        mtime: MTime,
        /// Size on disk in bytes.
        size: u64,
    },
    /// Deleted file that was previously present.
    Deleted,
    /// An old version of a file that was previously present
    /// usually from a version control system. e.g. A git blob
    Historic {
        /// Whether the file has since been deleted (see [`DiskState::is_deleted`]).
        was_deleted: bool,
    },
}
387
388impl DiskState {
389 /// Returns the file's last known modification time on disk.
390 pub fn mtime(self) -> Option<MTime> {
391 match self {
392 DiskState::New => None,
393 DiskState::Present { mtime, .. } => Some(mtime),
394 DiskState::Deleted => None,
395 DiskState::Historic { .. } => None,
396 }
397 }
398
399 /// Returns the file's size on disk in bytes.
400 pub fn size(self) -> Option<u64> {
401 match self {
402 DiskState::New => None,
403 DiskState::Present { size, .. } => Some(size),
404 DiskState::Deleted => None,
405 DiskState::Historic { .. } => None,
406 }
407 }
408
409 pub fn exists(&self) -> bool {
410 match self {
411 DiskState::New => false,
412 DiskState::Present { .. } => true,
413 DiskState::Deleted => false,
414 DiskState::Historic { .. } => false,
415 }
416 }
417
418 /// Returns true if this state represents a deleted file.
419 pub fn is_deleted(&self) -> bool {
420 match self {
421 DiskState::Deleted => true,
422 DiskState::Historic { was_deleted } => *was_deleted,
423 _ => false,
424 }
425 }
426}
427
/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
    fn load(&self, cx: &App) -> Task<Result<String>>;

    /// Loads the file's contents from disk as raw bytes, without decoding.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
}
439
/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of insertion
        /// is then auto-indented to column `b`, then every other line of
        /// the insertion will be auto-indented to column `b - a`
        original_indent_columns: Vec<Option<u32>>,
    },
}
463
/// A pending request to recompute the indentation of edited regions,
/// processed asynchronously by the buffer.
#[derive(Clone)]
struct AutoindentRequest {
    /// Snapshot of the buffer before the triggering edit was applied.
    before_edit: BufferSnapshot,
    /// One entry per region whose indentation should be adjusted.
    entries: Vec<AutoindentRequestEntry>,
    /// Whether to use block-mode indentation (see [`AutoindentMode::Block`]).
    is_block_mode: bool,
    /// Presumably, whether empty lines should be left untouched — confirm
    /// against the request's processing code.
    ignore_empty_lines: bool,
}
471
#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// The row of the edit start in the buffer before the edit was applied.
    /// This is stored here because the anchor in range is created after
    /// the edit, so it cannot be used with the before_edit snapshot.
    old_row: Option<u32>,
    /// The indentation size to use for this entry.
    indent_size: IndentSize,
    /// Indent column the text had when it was copied, if known
    /// (see [`AutoindentMode::Block`]).
    original_indent_column: Option<u32>,
}
483
/// A suggested indentation for a row, expressed relative to another row.
#[derive(Debug)]
struct IndentSuggestion {
    /// The row whose indentation this suggestion is based on.
    basis_row: u32,
    /// How this row's indent should compare to the basis row's indent
    /// (e.g. `Greater` = indent further, `Less` = outdent, `Equal` = same).
    delta: Ordering,
    /// NOTE(review): inferred from the name — whether the suggestion was
    /// computed within a syntax-error region; confirm at the usage site.
    within_error: bool,
}
490
/// Syntax-highlighting state carried by a [`BufferChunks`] iterator.
struct BufferChunkHighlights<'a> {
    /// Captures produced by the syntax map's highlight queries.
    captures: SyntaxMapCaptures<'a>,
    /// The next capture to process, pulled ahead of the iterator position.
    next_capture: Option<SyntaxMapCapture<'a>>,
    /// Stack of currently-open highlights — presumably (end offset,
    /// highlight id) pairs; confirm against `BufferChunks::next`.
    stack: Vec<(usize, HighlightId)>,
    /// One highlight map per grammar present in `captures`.
    highlight_maps: Vec<HighlightMap>,
}
497
/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    /// The snapshot being iterated, when available; `None` when the iterator
    /// was built directly from a rope and captures (see `BufferChunks::new`).
    buffer_snapshot: Option<&'a BufferSnapshot>,
    /// The byte range of the buffer being iterated.
    range: Range<usize>,
    /// The underlying text chunks.
    chunks: text::Chunks<'a>,
    /// Diagnostic range boundaries, sorted for a single forward pass.
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    // The following depth counters track how many diagnostics of each
    // severity are open at the current position (starts minus ends).
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    unnecessary_depth: usize,
    /// Whether the current diagnostic region should be underlined.
    underline: bool,
    /// Syntax-highlighting state, if highlighting was requested.
    highlights: Option<BufferChunkHighlights<'a>>,
}
513
/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters are tabs in this string.
    /// NOTE(review): being a `u128`, this can only describe the first 128
    /// positions — presumably chunks never exceed that; confirm.
    pub tabs: u128,
    /// Bitmap of character indices in this chunk
    pub chars: u128,
    /// Bitmap of newline indices in this chunk
    pub newlines: u128,
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// Whether this chunk of text was originally an inlay.
    pub is_inlay: bool,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}
542
/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug, Clone)]
pub struct Diff {
    /// The buffer version the edits were computed against.
    pub base_version: clock::Global,
    /// The line ending of the new text.
    pub line_ending: LineEnding,
    /// The edits, as (byte range in the base version, replacement text).
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}
550
/// One boundary (start or end) of a diagnostic's range, used by
/// [`BufferChunks`] to track which diagnostics cover each chunk.
#[derive(Debug, Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    /// Buffer offset of this endpoint.
    offset: usize,
    /// `true` if this is the start of the diagnostic's range.
    is_start: bool,
    /// Whether the diagnostic's range should be underlined.
    underline: bool,
    /// The diagnostic's severity.
    severity: DiagnosticSeverity,
    /// Whether the diagnostic marks the code as unnecessary.
    is_unnecessary: bool,
}
559
/// A class of characters, used for characterizing a run of text
/// (e.g. for word-boundary detection).
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}
570
/// Context for character classification within a specific scope.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum CharScopeContext {
    /// Character classification for completion queries.
    ///
    /// This context treats certain characters as word constituents that would
    /// normally be considered punctuation, such as '-' in Tailwind classes
    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
    Completion,
    /// Character classification for linked edits.
    ///
    /// This context handles characters that should be treated as part of
    /// identifiers during linked editing operations, such as '.' in JSX
    /// component names like `<Animated.View>`.
    LinkedEdit,
}
587
/// A runnable is a set of data about a region that could be resolved into a task
pub struct Runnable {
    /// The tags associated with this runnable region.
    pub tags: SmallVec<[RunnableTag; 1]>,
    /// The language in which the runnable was detected.
    pub language: Arc<Language>,
    /// The id of the buffer containing the runnable.
    pub buffer: BufferId,
}
594
/// Text with resolved highlight styles, ready for display.
#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    /// The text itself.
    pub text: SharedString,
    /// Highlight styles, with the byte ranges of `text` they apply to.
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}
600
/// Accumulates text and highlights before producing a [`HighlightedText`].
#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    /// The accumulated text.
    pub text: String,
    /// The accumulated highlights, as byte ranges into `text`.
    highlights: Vec<(Range<usize>, HighlightStyle)>,
}
606
impl HighlightedText {
    /// Highlights `range` of `snapshot` using `syntax_snapshot`'s highlight
    /// queries, optionally layering `override_style` over the syntax styles.
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    /// Converts this into a [`StyledText`] element, falling back to
    /// `default_style` where no highlight applies.
    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line without leading whitespace unless highlighted
    /// and a boolean indicating if there are more lines after
    pub fn first_line_preview(self) -> (Self, bool) {
        // `newline_ix` is the end of the first line (or of the whole text).
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        // Keep only the highlights intersecting the preview, shifting them
        // left by `preview_start_ix` and clamping them to the first line.
        let preview_highlights = self
            .highlights
            .into_iter()
            .skip_while(|(range, _)| range.end <= preview_start_ix)
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        // More lines exist iff a newline was found before the end.
        (preview, self.text.len() > newline_ix)
    }
}
667
668impl HighlightedTextBuilder {
669 pub fn build(self) -> HighlightedText {
670 HighlightedText {
671 text: self.text.into(),
672 highlights: self.highlights,
673 }
674 }
675
676 pub fn add_text_from_buffer_range<T: ToOffset>(
677 &mut self,
678 range: Range<T>,
679 snapshot: &text::BufferSnapshot,
680 syntax_snapshot: &SyntaxSnapshot,
681 override_style: Option<HighlightStyle>,
682 syntax_theme: &SyntaxTheme,
683 ) {
684 let range = range.to_offset(snapshot);
685 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
686 let start = self.text.len();
687 self.text.push_str(chunk.text);
688 let end = self.text.len();
689
690 if let Some(highlight_style) = chunk
691 .syntax_highlight_id
692 .and_then(|id| syntax_theme.get(id).cloned())
693 {
694 let highlight_style = override_style.map_or(highlight_style, |override_style| {
695 highlight_style.highlight(override_style)
696 });
697 self.highlights.push((start..end, highlight_style));
698 } else if let Some(override_style) = override_style {
699 self.highlights.push((start..end, override_style));
700 }
701 }
702 }
703
704 fn highlighted_chunks<'a>(
705 range: Range<usize>,
706 snapshot: &'a text::BufferSnapshot,
707 syntax_snapshot: &'a SyntaxSnapshot,
708 ) -> BufferChunks<'a> {
709 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
710 grammar
711 .highlights_config
712 .as_ref()
713 .map(|config| &config.query)
714 });
715
716 let highlight_maps = captures
717 .grammars()
718 .iter()
719 .map(|grammar| grammar.highlight_map())
720 .collect();
721
722 BufferChunks::new(
723 snapshot.as_rope(),
724 range,
725 Some((captures, highlight_maps)),
726 false,
727 None,
728 )
729 }
730}
731
/// A preview of a set of edits: the buffer's text before and after the edits
/// were applied, plus syntax information for highlighting the result.
#[derive(Clone)]
pub struct EditPreview {
    /// The buffer's text before the edits.
    old_snapshot: text::BufferSnapshot,
    /// The buffer's text with the edits applied.
    applied_edits_snapshot: text::BufferSnapshot,
    /// Syntax snapshot used to highlight the applied-edits text.
    syntax_snapshot: SyntaxSnapshot,
}
738
739impl EditPreview {
740 pub fn as_unified_diff(
741 &self,
742 file: Option<&Arc<dyn File>>,
743 edits: &[(Range<Anchor>, impl AsRef<str>)],
744 ) -> Option<String> {
745 let (first, _) = edits.first()?;
746 let (last, _) = edits.last()?;
747
748 let start = first.start.to_point(&self.old_snapshot);
749 let old_end = last.end.to_point(&self.old_snapshot);
750 let new_end = last
751 .end
752 .bias_right(&self.old_snapshot)
753 .to_point(&self.applied_edits_snapshot);
754
755 let start = Point::new(start.row.saturating_sub(3), 0);
756 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
757 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
758
759 let diff_body = unified_diff_with_offsets(
760 &self
761 .old_snapshot
762 .text_for_range(start..old_end)
763 .collect::<String>(),
764 &self
765 .applied_edits_snapshot
766 .text_for_range(start..new_end)
767 .collect::<String>(),
768 start.row,
769 start.row,
770 );
771
772 let path = file.map(|f| f.path().as_unix_str());
773 let header = match path {
774 Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
775 None => String::new(),
776 };
777
778 Some(format!("{}{}", header, diff_body))
779 }
780
781 pub fn highlight_edits(
782 &self,
783 current_snapshot: &BufferSnapshot,
784 edits: &[(Range<Anchor>, impl AsRef<str>)],
785 include_deletions: bool,
786 cx: &App,
787 ) -> HighlightedText {
788 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
789 return HighlightedText::default();
790 };
791
792 let mut highlighted_text = HighlightedTextBuilder::default();
793
794 let visible_range_in_preview_snapshot =
795 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
796 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
797
798 let insertion_highlight_style = HighlightStyle {
799 background_color: Some(cx.theme().status().created_background),
800 ..Default::default()
801 };
802 let deletion_highlight_style = HighlightStyle {
803 background_color: Some(cx.theme().status().deleted_background),
804 ..Default::default()
805 };
806 let syntax_theme = cx.theme().syntax();
807
808 for (range, edit_text) in edits {
809 let edit_new_end_in_preview_snapshot = range
810 .end
811 .bias_right(&self.old_snapshot)
812 .to_offset(&self.applied_edits_snapshot);
813 let edit_start_in_preview_snapshot =
814 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
815
816 let unchanged_range_in_preview_snapshot =
817 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
818 if !unchanged_range_in_preview_snapshot.is_empty() {
819 highlighted_text.add_text_from_buffer_range(
820 unchanged_range_in_preview_snapshot,
821 &self.applied_edits_snapshot,
822 &self.syntax_snapshot,
823 None,
824 syntax_theme,
825 );
826 }
827
828 let range_in_current_snapshot = range.to_offset(current_snapshot);
829 if include_deletions && !range_in_current_snapshot.is_empty() {
830 highlighted_text.add_text_from_buffer_range(
831 range_in_current_snapshot,
832 ¤t_snapshot.text,
833 ¤t_snapshot.syntax,
834 Some(deletion_highlight_style),
835 syntax_theme,
836 );
837 }
838
839 if !edit_text.as_ref().is_empty() {
840 highlighted_text.add_text_from_buffer_range(
841 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
842 &self.applied_edits_snapshot,
843 &self.syntax_snapshot,
844 Some(insertion_highlight_style),
845 syntax_theme,
846 );
847 }
848
849 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
850 }
851
852 highlighted_text.add_text_from_buffer_range(
853 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
854 &self.applied_edits_snapshot,
855 &self.syntax_snapshot,
856 None,
857 syntax_theme,
858 );
859
860 highlighted_text.build()
861 }
862
863 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
864 cx.new(|cx| {
865 let mut buffer = Buffer::local_normalized(
866 self.applied_edits_snapshot.as_rope().clone(),
867 self.applied_edits_snapshot.line_ending(),
868 cx,
869 );
870 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
871 buffer
872 })
873 }
874
875 pub fn anchor_to_offset_in_result(&self, anchor: Anchor) -> usize {
876 anchor
877 .bias_right(&self.old_snapshot)
878 .to_offset(&self.applied_edits_snapshot)
879 }
880
881 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
882 let (first, _) = edits.first()?;
883 let (last, _) = edits.last()?;
884
885 let start = first
886 .start
887 .bias_left(&self.old_snapshot)
888 .to_point(&self.applied_edits_snapshot);
889 let end = last
890 .end
891 .bias_right(&self.old_snapshot)
892 .to_point(&self.applied_edits_snapshot);
893
894 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
895 let range = Point::new(start.row, 0)
896 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
897
898 Some(range)
899 }
900}
901
/// A pair of matching brackets found in the buffer.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch<T> {
    /// The range of the opening bracket.
    pub open_range: Range<T>,
    /// The range of the closing bracket.
    pub close_range: Range<T>,
    /// NOTE(review): inferred from the name — whether this pair should only
    /// be considered when matching newlines; confirm at the query site.
    pub newline_only: bool,
    /// Depth of the syntax layer in which the match was found.
    pub syntax_layer_depth: usize,
    /// Index used to choose a color for this pair, if any
    /// — NOTE(review): inferred from the name; confirm.
    pub color_index: Option<usize>,
}
910
911impl<T> BracketMatch<T> {
912 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
913 (self.open_range, self.close_range)
914 }
915}
916
917impl Buffer {
918 /// Create a new buffer with the given base text.
919 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
920 Self::build(
921 TextBuffer::new(
922 ReplicaId::LOCAL,
923 cx.entity_id().as_non_zero_u64().into(),
924 base_text.into(),
925 ),
926 None,
927 Capability::ReadWrite,
928 )
929 }
930
931 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
932 pub fn local_normalized(
933 base_text_normalized: Rope,
934 line_ending: LineEnding,
935 cx: &Context<Self>,
936 ) -> Self {
937 Self::build(
938 TextBuffer::new_normalized(
939 ReplicaId::LOCAL,
940 cx.entity_id().as_non_zero_u64().into(),
941 line_ending,
942 base_text_normalized,
943 ),
944 None,
945 Capability::ReadWrite,
946 )
947 }
948
949 /// Create a new buffer that is a replica of a remote buffer.
950 pub fn remote(
951 remote_id: BufferId,
952 replica_id: ReplicaId,
953 capability: Capability,
954 base_text: impl Into<String>,
955 ) -> Self {
956 Self::build(
957 TextBuffer::new(replica_id, remote_id, base_text.into()),
958 None,
959 capability,
960 )
961 }
962
963 /// Create a new buffer that is a replica of a remote buffer, populating its
964 /// state from the given protobuf message.
965 pub fn from_proto(
966 replica_id: ReplicaId,
967 capability: Capability,
968 message: proto::BufferState,
969 file: Option<Arc<dyn File>>,
970 ) -> Result<Self> {
971 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
972 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
973 let mut this = Self::build(buffer, file, capability);
974 this.text.set_line_ending(proto::deserialize_line_ending(
975 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
976 ));
977 this.saved_version = proto::deserialize_version(&message.saved_version);
978 this.saved_mtime = message.saved_mtime.map(|time| time.into());
979 Ok(this)
980 }
981
982 /// Serialize the buffer's state to a protobuf message.
983 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
984 proto::BufferState {
985 id: self.remote_id().into(),
986 file: self.file.as_ref().map(|f| f.to_proto(cx)),
987 base_text: self.base_text().to_string(),
988 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
989 saved_version: proto::serialize_version(&self.saved_version),
990 saved_mtime: self.saved_mtime.map(|time| time.into()),
991 }
992 }
993
    /// Serialize as protobufs all of the changes to the buffer since the given version.
    ///
    /// Deferred operations, remote selections, diagnostics, and completion
    /// triggers are collected synchronously; the (potentially large) set of
    /// text operations is filtered and serialized on a background task. The
    /// final list is sorted by lamport timestamp.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &App,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        for (server_id, diagnostics) in self.diagnostics.iter() {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        for (server_id, completions) in &self.completion_triggers_per_language_server {
            operations.push(proto::serialize_operation(
                &Operation::UpdateCompletionTriggers {
                    triggers: completions.iter().cloned().collect(),
                    lamport_timestamp: self.completion_triggers_timestamp,
                    server_id: *server_id,
                },
            ));
        }

        let text_operations = self.text.operations().clone();
        cx.background_spawn(async move {
            // `None` means "serialize everything from the beginning".
            let since = since.unwrap_or_default();
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }
1043
    /// Assign a language to the buffer, returning the buffer.
    ///
    /// Builder-style wrapper around [`Self::set_language_async`].
    pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language_async(Some(language), cx);
        self
    }

    /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
    ///
    /// Builder-style wrapper around [`Self::set_language`].
    #[ztracing::instrument(skip_all, fields(lang = language.config.name.0.as_str()))]
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }

    /// Returns the [`Capability`] of this buffer.
    pub fn capability(&self) -> Capability {
        self.capability
    }

    /// Whether this buffer can only be read.
    pub fn read_only(&self) -> bool {
        !self.capability.editable()
    }
1066
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
        // A buffer with no backing file (or one not yet on disk) has no mtime.
        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
        let snapshot = buffer.snapshot();
        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
        let tree_sitter_data = TreeSitterData::new(snapshot);
        Self {
            saved_mtime,
            tree_sitter_data: Arc::new(tree_sitter_data),
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,

            file,
            capability,
            syntax_map,
            reparse: None,
            non_text_state_update_count: 0,
            // Tests get a longer synchronous-parse budget to reduce flakiness;
            // production blocks the foreground thread for at most 1ms.
            sync_parse_timeout: if cfg!(any(test, feature = "test-support")) {
                Some(Duration::from_millis(10))
            } else {
                Some(Duration::from_millis(1))
            },
            parse_status: watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            wait_for_autoindent_txs: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Lamport::MIN,
            completion_triggers: Default::default(),
            completion_triggers_per_language_server: Default::default(),
            completion_triggers_timestamp: Lamport::MIN,
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
            change_bits: Default::default(),
            modeline: None,
            _subscriptions: Vec::new(),
            // New buffers default to UTF-8 without a byte order mark.
            encoding: encoding_rs::UTF_8,
            has_bom: false,
            reload_with_encoding_txns: HashMap::default(),
        }
    }
1115
    #[ztracing::instrument(skip_all)]
    /// Builds a standalone, read-only [`BufferSnapshot`] for the given text,
    /// reparsing it with `language` (if any) when the returned future runs.
    ///
    /// A fresh entity id is reserved synchronously so the returned future does
    /// not borrow `cx`.
    pub fn build_snapshot(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        modeline: Option<Arc<ModelineSettings>>,
        cx: &mut App,
    ) -> impl Future<Output = BufferSnapshot> + use<> {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        async move {
            let text =
                TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text);
            let text = text.into_snapshot();
            let mut syntax = SyntaxMap::new(&text).snapshot();
            if let Some(language) = language.clone() {
                let language_registry = language_registry.clone();
                syntax.reparse(&text, language_registry, language);
            }
            let tree_sitter_data = TreeSitterData::new(&text);
            BufferSnapshot {
                text,
                syntax,
                file: None,
                diagnostics: Default::default(),
                remote_selections: Default::default(),
                tree_sitter_data: Arc::new(tree_sitter_data),
                language,
                non_text_state_update_count: 0,
                capability: Capability::ReadOnly,
                modeline,
            }
        }
    }
1150
1151 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1152 let entity_id = cx.reserve_entity::<Self>().entity_id();
1153 let buffer_id = entity_id.as_non_zero_u64().into();
1154 let text = TextBuffer::new_normalized(
1155 ReplicaId::LOCAL,
1156 buffer_id,
1157 Default::default(),
1158 Rope::new(),
1159 );
1160 let text = text.into_snapshot();
1161 let syntax = SyntaxMap::new(&text).snapshot();
1162 let tree_sitter_data = TreeSitterData::new(&text);
1163 BufferSnapshot {
1164 text,
1165 syntax,
1166 tree_sitter_data: Arc::new(tree_sitter_data),
1167 file: None,
1168 diagnostics: Default::default(),
1169 remote_selections: Default::default(),
1170 language: None,
1171 non_text_state_update_count: 0,
1172 capability: Capability::ReadOnly,
1173 modeline: None,
1174 }
1175 }
1176
    #[cfg(any(test, feature = "test-support"))]
    /// Test-only synchronous variant of [`Self::build_snapshot`]: reparses on
    /// the current thread instead of returning a future.
    pub fn build_snapshot_sync(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> BufferSnapshot {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text =
            TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                .into_snapshot();
        let mut syntax = SyntaxMap::new(&text).snapshot();
        if let Some(language) = language.clone() {
            syntax.reparse(&text, language_registry, language);
        }
        let tree_sitter_data = TreeSitterData::new(&text);
        BufferSnapshot {
            text,
            syntax,
            tree_sitter_data: Arc::new(tree_sitter_data),
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language,
            non_text_state_update_count: 0,
            capability: Capability::ReadOnly,
            modeline: None,
        }
    }
1207
    /// Retrieve a snapshot of the buffer's current state. This is computationally
    /// cheap, and allows reading from the buffer on a background thread.
    pub fn snapshot(&self) -> BufferSnapshot {
        let text = self.text.snapshot();

        // Bring the syntax map up to date with any edits made since the last
        // parse before capturing a stable snapshot of it.
        let syntax = {
            let mut syntax_map = self.syntax_map.lock();
            syntax_map.interpolate(text);
            syntax_map.snapshot()
        };

        // Reuse the cached tree-sitter data only when it still matches the
        // text's version; otherwise rebuild from the current text.
        let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
            Arc::new(TreeSitterData::new(text))
        } else {
            self.tree_sitter_data.clone()
        };

        BufferSnapshot {
            text: text.clone(),
            syntax,
            tree_sitter_data,
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
            capability: self.capability,
            modeline: self.modeline.clone(),
        }
    }
1238
    #[ztracing::instrument(skip_all)]
    /// Computes, on a background task, an [`EditPreview`] describing how the
    /// buffer would look if `edits` were applied, without mutating the buffer.
    pub fn preview_edits(
        &self,
        edits: Arc<[(Range<Anchor>, Arc<str>)]>,
        cx: &App,
    ) -> Task<EditPreview> {
        let registry = self.language_registry();
        let language = self.language().cloned();
        let old_snapshot = self.text.snapshot().clone();
        let mut new_snapshot = old_snapshot.clone();
        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
        cx.background_spawn(async move {
            if !edits.is_empty() {
                // Parse the pre-edit text first so the post-edit reparse is
                // incremental relative to a fully parsed baseline.
                if let Some(language) = language.clone() {
                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                }

                new_snapshot.edit(edits.iter().cloned());
                syntax_snapshot.interpolate(&new_snapshot);

                if let Some(language) = language {
                    syntax_snapshot.reparse(&new_snapshot, registry, language);
                }
            }
            EditPreview {
                old_snapshot,
                applied_edits_snapshot: new_snapshot,
                syntax_snapshot,
            }
        })
    }
1270
    /// Borrows the underlying text snapshot without cloning.
    pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }

    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    #[ztracing::instrument(skip_all)]
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        // todo lw
        self.text.snapshot().clone()
    }
1282
    /// The file associated with the buffer, if any.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }

    /// The version of the buffer that was last saved or reloaded from disk.
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }

    /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
    pub fn saved_mtime(&self) -> Option<MTime> {
        self.saved_mtime
    }

    /// Returns the character encoding of the buffer's file.
    pub fn encoding(&self) -> &'static Encoding {
        self.encoding
    }

    /// Sets the character encoding of the buffer.
    ///
    /// This only records the encoding; it does not re-decode the contents.
    pub fn set_encoding(&mut self, encoding: &'static Encoding) {
        self.encoding = encoding;
    }

    /// Returns whether the buffer has a Byte Order Mark.
    pub fn has_bom(&self) -> bool {
        self.has_bom
    }

    /// Sets whether the buffer has a Byte Order Mark.
    pub fn set_has_bom(&mut self, has_bom: bool) {
        self.has_bom = has_bom;
    }
1317
    /// Assign a language to the buffer.
    ///
    /// The reparse runs in the background (except under tests, where it may
    /// briefly block to reduce flakiness).
    pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
    }

    /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.set_language_(language, true, cx);
    }
1327
    #[ztracing::instrument(skip_all)]
    /// Shared implementation of the `set_language*` methods: clears the syntax
    /// map, replaces the language, and triggers a reparse. `may_block`
    /// controls whether the reparse may briefly block the foreground thread.
    fn set_language_(
        &mut self,
        language: Option<Arc<Language>>,
        may_block: bool,
        cx: &mut Context<Self>,
    ) {
        if language == self.language {
            return;
        }
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().clear(&self.text);
        let old_language = std::mem::replace(&mut self.language, language);
        self.was_changed();
        self.reparse(cx, may_block);
        // "Fresh" means the buffer gained a real language, having previously
        // had none (or only plain text).
        let has_fresh_language =
            self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
        cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
    }
1347
    /// Assign a language registry to the buffer. This allows the buffer to retrieve
    /// other languages if parts of the buffer are written in different languages.
    pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
        self.syntax_map
            .lock()
            .set_language_registry(language_registry);
    }

    /// Returns the language registry assigned to the buffer, if any.
    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }
1359
    /// Assign the line ending type to the buffer.
    ///
    /// The change is broadcast to collaborators as an
    /// [`Operation::UpdateLineEnding`] operation.
    pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
        self.text.set_line_ending(line_ending);

        let lamport_timestamp = self.text.lamport_clock.tick();
        self.send_operation(
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            },
            true,
            cx,
        );
    }
1374
1375 /// Assign the buffer [`ModelineSettings`].
1376 pub fn set_modeline(&mut self, modeline: Option<ModelineSettings>) -> bool {
1377 if modeline.as_ref() != self.modeline.as_deref() {
1378 self.modeline = modeline.map(Arc::new);
1379 true
1380 } else {
1381 false
1382 }
1383 }
1384
    /// Returns the [`ModelineSettings`], if any have been assigned.
    pub fn modeline(&self) -> Option<&Arc<ModelineSettings>> {
        self.modeline.as_ref()
    }
1389
1390 /// Assign the buffer a new [`Capability`].
1391 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1392 if self.capability != capability {
1393 self.capability = capability;
1394 cx.emit(BufferEvent::CapabilityChanged)
1395 }
1396 }
1397
    /// This method is called to signal that the buffer has been saved.
    ///
    /// Records the saved version and mtime, clears dirty/conflict state, and
    /// emits [`BufferEvent::Saved`].
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version.clone();
        self.has_unsaved_edits.set((version, false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        self.was_changed();
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }
1413
    /// Reloads the contents of the buffer from disk.
    ///
    /// The returned receiver yields the transaction produced by applying the
    /// reload diff, or `None` if the reload did not change the buffer.
    pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
        self.reload_impl(None, cx)
    }

    /// Reloads the contents of the buffer from disk using the specified encoding.
    ///
    /// This bypasses automatic encoding detection heuristics (like BOM checks) for non-Unicode encodings,
    /// allowing users to force a specific interpretation of the bytes.
    pub fn reload_with_encoding(
        &mut self,
        encoding: &'static Encoding,
        cx: &Context<Self>,
    ) -> oneshot::Receiver<Option<Transaction>> {
        self.reload_impl(Some(encoding), cx)
    }
1430
    /// Shared implementation of [`Self::reload`] and
    /// [`Self::reload_with_encoding`]: loads the file's bytes, decodes them
    /// (optionally forcing `force_encoding`), diffs the result against the
    /// buffer, and applies the diff only if no conflicting edits arrived in
    /// the meantime.
    fn reload_impl(
        &mut self,
        force_encoding: Option<&'static Encoding>,
        cx: &Context<Self>,
    ) -> oneshot::Receiver<Option<Transaction>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        let prev_version = self.text.version();

        self.reload_task = Some(cx.spawn(async move |this, cx| {
            // Only buffers backed by a local file can be reloaded.
            let Some((new_mtime, load_bytes_task, current_encoding)) =
                this.update(cx, |this, cx| {
                    let file = this.file.as_ref()?.as_local()?;
                    Some((
                        file.disk_state().mtime(),
                        file.load_bytes(cx),
                        this.encoding,
                    ))
                })?
            else {
                return Ok(());
            };

            let target_encoding = force_encoding.unwrap_or(current_encoding);

            let is_unicode = target_encoding == encoding_rs::UTF_8
                || target_encoding == encoding_rs::UTF_16LE
                || target_encoding == encoding_rs::UTF_16BE;

            // When a non-Unicode encoding is forced, decode without BOM
            // sniffing so the bytes are interpreted exactly as requested.
            let (new_text, has_bom, encoding_used) = if force_encoding.is_some() && !is_unicode {
                let bytes = load_bytes_task.await?;
                let (cow, _had_errors) = target_encoding.decode_without_bom_handling(&bytes);
                (cow.into_owned(), false, target_encoding)
            } else {
                let bytes = load_bytes_task.await?;
                let (cow, used_enc, _had_errors) = target_encoding.decode(&bytes);

                // Record whether the file actually begins with a BOM for the
                // encoding that was ultimately used.
                let actual_has_bom = if used_enc == encoding_rs::UTF_8 {
                    bytes.starts_with(&[0xEF, 0xBB, 0xBF])
                } else if used_enc == encoding_rs::UTF_16LE {
                    bytes.starts_with(&[0xFF, 0xFE])
                } else if used_enc == encoding_rs::UTF_16BE {
                    bytes.starts_with(&[0xFE, 0xFF])
                } else {
                    false
                };
                (cow.into_owned(), actual_has_bom, used_enc)
            };

            let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
            this.update(cx, |this, cx| {
                // Apply the diff only if the buffer hasn't changed since the
                // diff was computed; otherwise flag a conflict if needed.
                if this.version() == diff.base_version {
                    this.finalize_last_transaction();
                    let old_encoding = this.encoding;
                    let old_has_bom = this.has_bom;
                    this.apply_diff(diff, cx);
                    this.encoding = encoding_used;
                    this.has_bom = has_bom;
                    let transaction = this.finalize_last_transaction().cloned();
                    if let Some(ref txn) = transaction {
                        // Remember the previous encoding/BOM so undoing this
                        // transaction can restore them.
                        if old_encoding != encoding_used || old_has_bom != has_bom {
                            this.reload_with_encoding_txns
                                .insert(txn.id, (old_encoding, old_has_bom));
                        }
                    }
                    tx.send(transaction).ok();
                    this.has_conflict = false;
                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
                } else {
                    if !diff.edits.is_empty()
                        || this
                            .edits_since::<usize>(&diff.base_version)
                            .next()
                            .is_some()
                    {
                        this.has_conflict = true;
                    }

                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
                }

                this.reload_task.take();
            })
        }));
        rx
    }
1516
    /// This method is called to signal that the buffer has been reloaded.
    ///
    /// Records the reloaded version, line ending, and mtime, clears the
    /// unsaved-edits flag, and emits [`BufferEvent::Reloaded`].
    pub fn did_reload(
        &mut self,
        version: clock::Global,
        line_ending: LineEnding,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        self.has_unsaved_edits
            .set((self.saved_version.clone(), false));
        self.text.set_line_ending(line_ending);
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Reloaded);
        cx.notify();
    }
1533
    /// Updates the [`File`] backing this buffer. This should be called when
    /// the file has changed or has been deleted.
    ///
    /// Emits [`BufferEvent::ReloadNeeded`] when a clean buffer's on-disk state
    /// changed, and [`BufferEvent::FileHandleChanged`] (plus `DirtyChanged`
    /// when applicable) whenever the path or disk state differs.
    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let mut file_changed = false;

        if let Some(old_file) = self.file.as_ref() {
            if new_file.path() != old_file.path() {
                file_changed = true;
            }

            let old_state = old_file.disk_state();
            let new_state = new_file.disk_state();
            if old_state != new_state {
                file_changed = true;
                // Only suggest a reload when there are no unsaved edits that
                // would be clobbered and the file still exists on disk.
                if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
                    cx.emit(BufferEvent::ReloadNeeded)
                }
            }
        } else {
            // The buffer previously had no file at all.
            file_changed = true;
        };

        self.file = Some(new_file);
        if file_changed {
            self.was_changed();
            self.non_text_state_update_count += 1;
            if was_dirty != self.is_dirty() {
                cx.emit(BufferEvent::DirtyChanged);
            }
            cx.emit(BufferEvent::FileHandleChanged);
            cx.notify();
        }
    }
1568
    /// Returns the primary [`Language`] assigned to this [`Buffer`].
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }

    /// Returns the [`Language`] at the given location.
    ///
    /// Considers injected syntax layers (e.g. embedded languages) and falls
    /// back to the buffer's primary language when no layer covers the offset.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
        let offset = position.to_offset(self);
        let text: &TextBufferSnapshot = &self.text;
        self.syntax_map
            .lock()
            .layers_for_range(offset..offset, text, false)
            .filter(|layer| {
                // For combined injections, check if offset is within the actual sub-ranges.
                layer
                    .included_sub_ranges
                    .is_none_or(|ranges| offset_in_sub_ranges(ranges, offset, text))
            })
            // The last (innermost) matching layer wins.
            .last()
            .map(|info| info.language.clone())
            .or_else(|| self.language.clone())
    }

    /// Returns each [`Language`] for the active syntax layers at the given location.
    pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
        let offset = position.to_offset(self);
        let text: &TextBufferSnapshot = &self.text;
        let mut languages: Vec<Arc<Language>> = self
            .syntax_map
            .lock()
            .layers_for_range(offset..offset, text, false)
            .filter(|layer| {
                // For combined injections, check if offset is within the actual sub-ranges.
                layer
                    .included_sub_ranges
                    .is_none_or(|ranges| offset_in_sub_ranges(ranges, offset, text))
            })
            .map(|info| info.language.clone())
            .collect();

        if languages.is_empty()
            && let Some(buffer_language) = self.language()
        {
            languages.push(buffer_language.clone());
        }

        languages
    }
1616
    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }

    /// Whether the buffer is being parsed in the background.
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.reparse.is_some()
    }

    /// Indicates whether the buffer contains any regions that may be
    /// written in a language that hasn't been loaded yet.
    pub fn contains_unknown_injections(&self) -> bool {
        self.syntax_map.lock().contains_unknown_injections()
    }

    /// Sets the sync parse timeout for this buffer.
    ///
    /// Setting this to `None` disables sync parsing entirely.
    pub fn set_sync_parse_timeout(&mut self, timeout: Option<Duration>) {
        self.sync_parse_timeout = timeout;
    }
1641
1642 fn invalidate_tree_sitter_data(
1643 tree_sitter_data: &mut Arc<TreeSitterData>,
1644 snapshot: &text::BufferSnapshot,
1645 ) {
1646 match Arc::get_mut(tree_sitter_data) {
1647 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1648 None => {
1649 let new_tree_sitter_data = TreeSitterData::new(snapshot);
1650 *tree_sitter_data = Arc::new(new_tree_sitter_data)
1651 }
1652 }
1653 }
1654
    /// Called after an edit to synchronize the buffer's main parse tree with
    /// the buffer's new underlying state.
    ///
    /// Locks the syntax map and interpolates the edits since the last reparse
    /// into the foreground syntax tree.
    ///
    /// Then takes a stable snapshot of the syntax map before unlocking it.
    /// The snapshot with the interpolated edits is sent to a background thread,
    /// where we ask Tree-sitter to perform an incremental parse.
    ///
    /// Meanwhile, in the foreground if `may_block` is true, we block the main
    /// thread for up to 1ms waiting on the parse to complete. As soon as it
    /// completes, we proceed synchronously, unless a 1ms timeout elapses.
    ///
    /// If we time out waiting on the parse, we spawn a second task waiting
    /// until the parse does complete and return with the interpolated tree still
    /// in the foreground. When the background parse completes, call back into
    /// the main thread and assign the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
    #[ztracing::instrument(skip_all)]
    pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
        if self.text.version() != *self.tree_sitter_data.version() {
            Self::invalidate_tree_sitter_data(&mut self.tree_sitter_data, self.text.snapshot());
        }
        // A background parse is already in flight; it will re-trigger if needed.
        if self.reparse.is_some() {
            return;
        }
        let language = if let Some(language) = self.language.clone() {
            language
        } else {
            return;
        };

        let text = self.text_snapshot();
        let parsed_version = self.version();

        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let language_registry = syntax_map.language_registry();
        let mut syntax_snapshot = syntax_map.snapshot();
        drop(syntax_map);

        self.parse_status.0.send(ParseStatus::Parsing).unwrap();
        // Fast path: try to finish the parse synchronously within the timeout.
        if may_block && let Some(sync_parse_timeout) = self.sync_parse_timeout {
            if let Ok(()) = syntax_snapshot.reparse_with_timeout(
                &text,
                language_registry.clone(),
                language.clone(),
                sync_parse_timeout,
            ) {
                self.did_finish_parsing(syntax_snapshot, Some(Duration::from_millis(300)), cx);
                self.reparse = None;
                return;
            }
        }

        // Slow path: finish the parse on a background task.
        let parse_task = cx.background_spawn({
            let language = language.clone();
            let language_registry = language_registry.clone();
            async move {
                syntax_snapshot.reparse(&text, language_registry, language);
                syntax_snapshot
            }
        });

        self.reparse = Some(cx.spawn(async move |this, cx| {
            let new_syntax_map = parse_task.await;
            this.update(cx, move |this, cx| {
                let grammar_changed = || {
                    this.language
                        .as_ref()
                        .is_none_or(|current_language| !Arc::ptr_eq(&language, current_language))
                };
                let language_registry_changed = || {
                    new_syntax_map.contains_unknown_injections()
                        && language_registry.is_some_and(|registry| {
                            registry.version() != new_syntax_map.language_registry_version()
                        })
                };
                let parse_again = this.version.changed_since(&parsed_version)
                    || language_registry_changed()
                    || grammar_changed();
                this.did_finish_parsing(new_syntax_map, None, cx);
                this.reparse = None;
                if parse_again {
                    this.reparse(cx, false);
                }
            })
            .ok();
        }));
    }
1750
1751 fn did_finish_parsing(
1752 &mut self,
1753 syntax_snapshot: SyntaxSnapshot,
1754 block_budget: Option<Duration>,
1755 cx: &mut Context<Self>,
1756 ) {
1757 self.non_text_state_update_count += 1;
1758 self.syntax_map.lock().did_parse(syntax_snapshot);
1759 self.was_changed();
1760 self.request_autoindent(cx, block_budget);
1761 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1762 Self::invalidate_tree_sitter_data(&mut self.tree_sitter_data, &self.text.snapshot());
1763 cx.emit(BufferEvent::Reparsed);
1764 cx.notify();
1765 }
1766
    /// Returns a watch receiver for observing the buffer's parse status.
    pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
        self.parse_status.1.clone()
    }

    /// Wait until the buffer is no longer parsing
    pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
        let mut parse_status = self.parse_status();
        async move {
            while *parse_status.borrow() != ParseStatus::Idle {
                // A closed channel means the buffer was dropped; stop waiting.
                if parse_status.changed().await.is_err() {
                    break;
                }
            }
        }
    }
1782
    /// Assign to the buffer a set of diagnostics created by a given language server.
    ///
    /// Applies the diagnostics locally and broadcasts the update to
    /// collaborators as an [`Operation::UpdateDiagnostics`].
    pub fn update_diagnostics(
        &mut self,
        server_id: LanguageServerId,
        diagnostics: DiagnosticSet,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        let op = Operation::UpdateDiagnostics {
            server_id,
            diagnostics: diagnostics.iter().cloned().collect(),
            lamport_timestamp,
        };

        self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
        self.send_operation(op, true, cx);
    }
1800
1801 pub fn buffer_diagnostics(
1802 &self,
1803 for_server: Option<LanguageServerId>,
1804 ) -> Vec<&DiagnosticEntry<Anchor>> {
1805 match for_server {
1806 Some(server_id) => self
1807 .diagnostics
1808 .get(&server_id)
1809 .map_or_else(Vec::new, |diagnostics| diagnostics.iter().collect()),
1810 None => self
1811 .diagnostics
1812 .iter()
1813 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1814 .collect(),
1815 }
1816 }
1817
    /// Computes and applies autoindent suggestions for any pending requests.
    ///
    /// When `block_budget` is `Some`, the foreground thread blocks up to that
    /// budget waiting for the computation; on timeout (or with no budget) the
    /// suggestions are applied asynchronously. When there is nothing to
    /// indent, pending requests and their waiters are cleared instead.
    fn request_autoindent(&mut self, cx: &mut Context<Self>, block_budget: Option<Duration>) {
        if let Some(indent_sizes) = self.compute_autoindents() {
            let indent_sizes = cx.background_spawn(indent_sizes);
            // No budget: always apply asynchronously.
            let Some(block_budget) = block_budget else {
                self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
                    let indent_sizes = indent_sizes.await;
                    this.update(cx, |this, cx| {
                        this.apply_autoindents(indent_sizes, cx);
                    })
                    .ok();
                }));
                return;
            };
            match cx
                .foreground_executor()
                .block_with_timeout(block_budget, indent_sizes)
            {
                Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
                // Timed out: fall back to applying asynchronously when the
                // background computation eventually completes.
                Err(indent_sizes) => {
                    self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
                        let indent_sizes = indent_sizes.await;
                        this.update(cx, |this, cx| {
                            this.apply_autoindents(indent_sizes, cx);
                        })
                        .ok();
                    }));
                }
            }
        } else {
            // Nothing to indent; resolve any waiters immediately.
            self.autoindent_requests.clear();
            for tx in self.wait_for_autoindent_txs.drain(..) {
                tx.send(()).ok();
            }
        }
    }
1853
1854 fn compute_autoindents(
1855 &self,
1856 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1857 let max_rows_between_yields = 100;
1858 let snapshot = self.snapshot();
1859 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1860 return None;
1861 }
1862
1863 let autoindent_requests = self.autoindent_requests.clone();
1864 Some(async move {
1865 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1866 for request in autoindent_requests {
1867 // Resolve each edited range to its row in the current buffer and in the
1868 // buffer before this batch of edits.
1869 let mut row_ranges = Vec::new();
1870 let mut old_to_new_rows = BTreeMap::new();
1871 let mut language_indent_sizes_by_new_row = Vec::new();
1872 for entry in &request.entries {
1873 let position = entry.range.start;
1874 let new_row = position.to_point(&snapshot).row;
1875 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1876 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1877
1878 if let Some(old_row) = entry.old_row {
1879 old_to_new_rows.insert(old_row, new_row);
1880 }
1881 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1882 }
1883
1884 // Build a map containing the suggested indentation for each of the edited lines
1885 // with respect to the state of the buffer before these edits. This map is keyed
1886 // by the rows for these lines in the current state of the buffer.
1887 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1888 let old_edited_ranges =
1889 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1890 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1891 let mut language_indent_size = IndentSize::default();
1892 for old_edited_range in old_edited_ranges {
1893 let suggestions = request
1894 .before_edit
1895 .suggest_autoindents(old_edited_range.clone())
1896 .into_iter()
1897 .flatten();
1898 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1899 if let Some(suggestion) = suggestion {
1900 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1901
1902 // Find the indent size based on the language for this row.
1903 while let Some((row, size)) = language_indent_sizes.peek() {
1904 if *row > new_row {
1905 break;
1906 }
1907 language_indent_size = *size;
1908 language_indent_sizes.next();
1909 }
1910
1911 let suggested_indent = old_to_new_rows
1912 .get(&suggestion.basis_row)
1913 .and_then(|from_row| {
1914 Some(old_suggestions.get(from_row).copied()?.0)
1915 })
1916 .unwrap_or_else(|| {
1917 request
1918 .before_edit
1919 .indent_size_for_line(suggestion.basis_row)
1920 })
1921 .with_delta(suggestion.delta, language_indent_size);
1922 old_suggestions
1923 .insert(new_row, (suggested_indent, suggestion.within_error));
1924 }
1925 }
1926 yield_now().await;
1927 }
1928
1929 // Compute new suggestions for each line, but only include them in the result
1930 // if they differ from the old suggestion for that line.
1931 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1932 let mut language_indent_size = IndentSize::default();
1933 for (row_range, original_indent_column) in row_ranges {
1934 let new_edited_row_range = if request.is_block_mode {
1935 row_range.start..row_range.start + 1
1936 } else {
1937 row_range.clone()
1938 };
1939
1940 let suggestions = snapshot
1941 .suggest_autoindents(new_edited_row_range.clone())
1942 .into_iter()
1943 .flatten();
1944 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1945 if let Some(suggestion) = suggestion {
1946 // Find the indent size based on the language for this row.
1947 while let Some((row, size)) = language_indent_sizes.peek() {
1948 if *row > new_row {
1949 break;
1950 }
1951 language_indent_size = *size;
1952 language_indent_sizes.next();
1953 }
1954
1955 let suggested_indent = indent_sizes
1956 .get(&suggestion.basis_row)
1957 .copied()
1958 .map(|e| e.0)
1959 .unwrap_or_else(|| {
1960 snapshot.indent_size_for_line(suggestion.basis_row)
1961 })
1962 .with_delta(suggestion.delta, language_indent_size);
1963
1964 if old_suggestions.get(&new_row).is_none_or(
1965 |(old_indentation, was_within_error)| {
1966 suggested_indent != *old_indentation
1967 && (!suggestion.within_error || *was_within_error)
1968 },
1969 ) {
1970 indent_sizes.insert(
1971 new_row,
1972 (suggested_indent, request.ignore_empty_lines),
1973 );
1974 }
1975 }
1976 }
1977
1978 if let (true, Some(original_indent_column)) =
1979 (request.is_block_mode, original_indent_column)
1980 {
1981 let new_indent =
1982 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1983 *indent
1984 } else {
1985 snapshot.indent_size_for_line(row_range.start)
1986 };
1987 let delta = new_indent.len as i64 - original_indent_column as i64;
1988 if delta != 0 {
1989 for row in row_range.skip(1) {
1990 indent_sizes.entry(row).or_insert_with(|| {
1991 let mut size = snapshot.indent_size_for_line(row);
1992 if size.kind == new_indent.kind {
1993 match delta.cmp(&0) {
1994 Ordering::Greater => size.len += delta as u32,
1995 Ordering::Less => {
1996 size.len = size.len.saturating_sub(-delta as u32)
1997 }
1998 Ordering::Equal => {}
1999 }
2000 }
2001 (size, request.ignore_empty_lines)
2002 });
2003 }
2004 }
2005 }
2006
2007 yield_now().await;
2008 }
2009 }
2010
2011 indent_sizes
2012 .into_iter()
2013 .filter_map(|(row, (indent, ignore_empty_lines))| {
2014 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2015 None
2016 } else {
2017 Some((row, indent))
2018 }
2019 })
2020 .collect()
2021 })
2022 }
2023
2024 fn apply_autoindents(
2025 &mut self,
2026 indent_sizes: BTreeMap<u32, IndentSize>,
2027 cx: &mut Context<Self>,
2028 ) {
2029 self.autoindent_requests.clear();
2030 for tx in self.wait_for_autoindent_txs.drain(..) {
2031 tx.send(()).ok();
2032 }
2033
2034 let edits: Vec<_> = indent_sizes
2035 .into_iter()
2036 .filter_map(|(row, indent_size)| {
2037 let current_size = indent_size_for_line(self, row);
2038 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2039 })
2040 .collect();
2041
2042 let preserve_preview = self.preserve_preview();
2043 self.edit(edits, None, cx);
2044 if preserve_preview {
2045 self.refresh_preview();
2046 }
2047 }
2048
2049 /// Create a minimal edit that will cause the given row to be indented
2050 /// with the given size. After applying this edit, the length of the line
2051 /// will always be at least `new_size.len`.
2052 pub fn edit_for_indent_size_adjustment(
2053 row: u32,
2054 current_size: IndentSize,
2055 new_size: IndentSize,
2056 ) -> Option<(Range<Point>, String)> {
2057 if new_size.kind == current_size.kind {
2058 match new_size.len.cmp(¤t_size.len) {
2059 Ordering::Greater => {
2060 let point = Point::new(row, 0);
2061 Some((
2062 point..point,
2063 iter::repeat(new_size.char())
2064 .take((new_size.len - current_size.len) as usize)
2065 .collect::<String>(),
2066 ))
2067 }
2068
2069 Ordering::Less => Some((
2070 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2071 String::new(),
2072 )),
2073
2074 Ordering::Equal => None,
2075 }
2076 } else {
2077 Some((
2078 Point::new(row, 0)..Point::new(row, current_size.len),
2079 iter::repeat(new_size.char())
2080 .take(new_size.len as usize)
2081 .collect::<String>(),
2082 ))
2083 }
2084 }
2085
2086 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2087 /// and the given new text.
2088 pub fn diff<T>(&self, new_text: T, cx: &App) -> Task<Diff>
2089 where
2090 T: AsRef<str> + Send + 'static,
2091 {
2092 let old_text = self.as_rope().clone();
2093 let base_version = self.version();
2094 cx.background_spawn(async move {
2095 let old_text = old_text.to_string();
2096 let mut new_text = new_text.as_ref().to_owned();
2097 let line_ending = LineEnding::detect(&new_text);
2098 LineEnding::normalize(&mut new_text);
2099 let edits = text_diff(&old_text, &new_text);
2100 Diff {
2101 base_version,
2102 line_ending,
2103 edits,
2104 }
2105 })
2106 }
2107
2108 /// Spawns a background task that searches the buffer for any whitespace
2109 /// at the ends of a lines, and returns a `Diff` that removes that whitespace.
2110 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2111 let old_text = self.as_rope().clone();
2112 let line_ending = self.line_ending();
2113 let base_version = self.version();
2114 cx.background_spawn(async move {
2115 let ranges = trailing_whitespace_ranges(&old_text);
2116 let empty = Arc::<str>::from("");
2117 Diff {
2118 base_version,
2119 line_ending,
2120 edits: ranges
2121 .into_iter()
2122 .map(|range| (range, empty.clone()))
2123 .collect(),
2124 }
2125 })
2126 }
2127
    /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Skips if the buffer is empty.
    pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
        let len = self.len();
        if len == 0 {
            return;
        }
        // Walk the rope's chunks from the end, tracking `offset`: the byte
        // offset just past the last non-whitespace character seen so far.
        let mut offset = len;
        for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
            let non_whitespace_len = chunk
                .trim_end_matches(|c: char| c.is_ascii_whitespace())
                .len();
            offset -= chunk.len();
            offset += non_whitespace_len;
            if non_whitespace_len != 0 {
                // The buffer already ends with content followed by exactly one
                // newline — nothing to do.
                if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
                    return;
                }
                break;
            }
        }
        // Replace all trailing whitespace (possibly none) with one newline.
        self.edit([(offset..len, "\n")], None, cx);
    }
2151
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, then adjust the diff to account for those changes, and discard any
    /// parts of the diff that conflict with those changes.
    ///
    /// Returns the id of the transaction containing the applied edits, or
    /// `None` if no transaction was created.
    pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
        let snapshot = self.snapshot();
        let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
        // Running offset adjustment accumulated from edits that precede the
        // current hunk.
        let mut delta = 0;
        let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
            while let Some(edit_since) = edits_since.peek() {
                // If the edit occurs after a diff hunk, then it does not
                // affect that hunk.
                if edit_since.old.start > range.end {
                    break;
                }
                // If the edit precedes the diff hunk, then adjust the hunk
                // to reflect the edit.
                else if edit_since.old.end < range.start {
                    delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
                    edits_since.next();
                }
                // If the edit intersects a diff hunk, then discard that hunk.
                else {
                    return None;
                }
            }

            let start = (range.start as i64 + delta) as usize;
            let end = (range.end as i64 + delta) as usize;
            Some((start..end, new_text))
        });

        self.start_transaction();
        self.text.set_line_ending(diff.line_ending);
        self.edit(adjusted_edits, None, cx);
        self.end_transaction(cx)
    }
2188
2189 pub fn has_unsaved_edits(&self) -> bool {
2190 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2191
2192 if last_version == self.version {
2193 self.has_unsaved_edits
2194 .set((last_version, has_unsaved_edits));
2195 return has_unsaved_edits;
2196 }
2197
2198 let has_edits = self.has_edits_since(&self.saved_version);
2199 self.has_unsaved_edits
2200 .set((self.version.clone(), has_edits));
2201 has_edits
2202 }
2203
2204 /// Checks if the buffer has unsaved changes.
2205 pub fn is_dirty(&self) -> bool {
2206 if self.capability == Capability::ReadOnly {
2207 return false;
2208 }
2209 if self.has_conflict {
2210 return true;
2211 }
2212 match self.file.as_ref().map(|f| f.disk_state()) {
2213 Some(DiskState::New) | Some(DiskState::Deleted) => {
2214 !self.is_empty() && self.has_unsaved_edits()
2215 }
2216 _ => self.has_unsaved_edits(),
2217 }
2218 }
2219
    /// Marks the buffer as having a conflict regardless of current buffer state.
    ///
    /// Once set, [`Self::has_conflict`] and [`Self::is_dirty`] report `true`
    /// unconditionally.
    pub fn set_conflict(&mut self) {
        self.has_conflict = true;
    }
2224
2225 /// Checks if the buffer and its file have both changed since the buffer
2226 /// was last saved or reloaded.
2227 pub fn has_conflict(&self) -> bool {
2228 if self.has_conflict {
2229 return true;
2230 }
2231 let Some(file) = self.file.as_ref() else {
2232 return false;
2233 };
2234 match file.disk_state() {
2235 DiskState::New => false,
2236 DiskState::Present { mtime, .. } => match self.saved_mtime {
2237 Some(saved_mtime) => {
2238 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2239 }
2240 None => true,
2241 },
2242 DiskState::Deleted => false,
2243 DiskState::Historic { .. } => false,
2244 }
2245 }
2246
    /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
    ///
    /// Edits are reported in `usize` (offset) coordinates.
    pub fn subscribe(&mut self) -> Subscription<usize> {
        self.text.subscribe()
    }
2251
2252 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2253 ///
2254 /// This allows downstream code to check if the buffer's text has changed without
2255 /// waiting for an effect cycle, which would be required if using eents.
2256 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2257 if let Err(ix) = self
2258 .change_bits
2259 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2260 {
2261 self.change_bits.insert(ix, bit);
2262 }
2263 }
2264
2265 /// Set the change bit for all "listeners".
2266 fn was_changed(&mut self) {
2267 self.change_bits.retain(|change_bit| {
2268 change_bit
2269 .upgrade()
2270 .inspect(|bit| {
2271 _ = bit.replace(true);
2272 })
2273 .is_some()
2274 });
2275 }
2276
    /// Starts a transaction, if one is not already in-progress. When undoing or
    /// redoing edits, all of the edits performed within a transaction are undone
    /// or redone together.
    ///
    /// Equivalent to [`Self::start_transaction_at`] with the current time.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }
2283
    /// Starts a transaction, providing the current time. Subsequent transactions
    /// that occur within a short period of time will be grouped together. This
    /// is controlled by the buffer's undo grouping duration.
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.transaction_depth += 1;
        // Capture the dirty state only for the outermost transaction, so the
        // matching `end_transaction_at` can detect dirty-state changes across
        // the entire nested transaction.
        if self.was_dirty_before_starting_transaction.is_none() {
            self.was_dirty_before_starting_transaction = Some(self.is_dirty());
        }
        self.text.start_transaction_at(now)
    }
2294
    /// Terminates the current transaction, if this is the outermost transaction.
    ///
    /// Equivalent to [`Self::end_transaction_at`] with the current time.
    pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
        self.end_transaction_at(Instant::now(), cx)
    }
2299
2300 /// Terminates the current transaction, providing the current time. Subsequent transactions
2301 /// that occur within a short period of time will be grouped together. This
2302 /// is controlled by the buffer's undo grouping duration.
2303 pub fn end_transaction_at(
2304 &mut self,
2305 now: Instant,
2306 cx: &mut Context<Self>,
2307 ) -> Option<TransactionId> {
2308 assert!(self.transaction_depth > 0);
2309 self.transaction_depth -= 1;
2310 let was_dirty = if self.transaction_depth == 0 {
2311 self.was_dirty_before_starting_transaction.take().unwrap()
2312 } else {
2313 false
2314 };
2315 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2316 self.did_edit(&start_version, was_dirty, true, cx);
2317 Some(transaction_id)
2318 } else {
2319 None
2320 }
2321 }
2322
    /// Manually add a transaction to the buffer's undo history.
    ///
    /// Delegates to the underlying text buffer; `now` participates in the
    /// usual undo grouping.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        self.text.push_transaction(transaction, now);
    }
2327
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
    pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
        self.text.push_empty_transaction(now)
    }
2342
    /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.text.finalize_last_transaction()
    }
2348
    /// Manually group all changes since a given transaction.
    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
        self.text.group_until_transaction(transaction_id);
    }
2353
    /// Manually remove a transaction from the buffer's undo history.
    ///
    /// Returns the removed transaction, if it was present.
    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        self.text.forget_transaction(transaction_id)
    }
2358
    /// Retrieve a transaction from the buffer's undo history.
    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        self.text.get_transaction(transaction_id)
    }
2363
    /// Manually merge two transactions in the buffer's undo history,
    /// folding `transaction` into `destination`.
    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.text.merge_transactions(transaction, destination);
    }
2368
    /// Waits for the buffer to receive operations with the given timestamps.
    ///
    /// The returned future resolves with an error if waiting is abandoned via
    /// [`Buffer::give_up_waiting`].
    pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
        &mut self,
        edit_ids: It,
    ) -> impl Future<Output = Result<()>> + use<It> {
        self.text.wait_for_edits(edit_ids)
    }
2376
    /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
    ///
    /// The returned future resolves with an error if waiting is abandoned via
    /// [`Buffer::give_up_waiting`].
    pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
        &mut self,
        anchors: It,
    ) -> impl 'static + Future<Output = Result<()>> + use<It> {
        self.text.wait_for_anchors(anchors)
    }
2384
    /// Waits for the buffer to receive operations up to the given version.
    ///
    /// The returned future resolves with an error if waiting is abandoned via
    /// [`Buffer::give_up_waiting`].
    pub fn wait_for_version(
        &mut self,
        version: clock::Global,
    ) -> impl Future<Output = Result<()>> + use<> {
        self.text.wait_for_version(version)
    }
2392
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
    pub fn give_up_waiting(&mut self) {
        self.text.give_up_waiting();
    }
2398
2399 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2400 let mut rx = None;
2401 if !self.autoindent_requests.is_empty() {
2402 let channel = oneshot::channel();
2403 self.wait_for_autoindent_txs.push(channel.0);
2404 rx = Some(channel.1);
2405 }
2406 rx
2407 }
2408
2409 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2410 pub fn set_active_selections(
2411 &mut self,
2412 selections: Arc<[Selection<Anchor>]>,
2413 line_mode: bool,
2414 cursor_shape: CursorShape,
2415 cx: &mut Context<Self>,
2416 ) {
2417 let lamport_timestamp = self.text.lamport_clock.tick();
2418 self.remote_selections.insert(
2419 self.text.replica_id(),
2420 SelectionSet {
2421 selections: selections.clone(),
2422 lamport_timestamp,
2423 line_mode,
2424 cursor_shape,
2425 },
2426 );
2427 self.send_operation(
2428 Operation::UpdateSelections {
2429 selections,
2430 line_mode,
2431 lamport_timestamp,
2432 cursor_shape,
2433 },
2434 true,
2435 cx,
2436 );
2437 self.non_text_state_update_count += 1;
2438 cx.notify();
2439 }
2440
2441 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2442 /// this replica.
2443 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2444 if self
2445 .remote_selections
2446 .get(&self.text.replica_id())
2447 .is_none_or(|set| !set.selections.is_empty())
2448 {
2449 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2450 }
2451 }
2452
    /// Stores a set of selections attributed to the agent replica
    /// ([`ReplicaId::AGENT`]).
    ///
    /// Unlike [`Self::set_active_selections`], this does not broadcast an
    /// operation to other replicas.
    pub fn set_agent_selections(
        &mut self,
        selections: Arc<[Selection<Anchor>]>,
        line_mode: bool,
        cursor_shape: CursorShape,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        self.remote_selections.insert(
            ReplicaId::AGENT,
            SelectionSet {
                selections,
                lamport_timestamp,
                line_mode,
                cursor_shape,
            },
        );
        self.non_text_state_update_count += 1;
        cx.notify();
    }
2473
    /// Clears the agent's selections by storing an empty selection set.
    pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
        self.set_agent_selections(Arc::default(), false, Default::default(), cx);
    }
2477
    /// Replaces the buffer's entire text.
    ///
    /// Cancels any pending autoindent requests, since they were computed
    /// against text that is being replaced wholesale. Returns the timestamp
    /// of the edit operation, or `None` if the edit was a no-op.
    pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
    where
        T: Into<Arc<str>>,
    {
        self.autoindent_requests.clear();
        self.edit([(0..self.len(), text)], None, cx)
    }
2486
    /// Appends the given text to the end of the buffer.
    ///
    /// Returns the timestamp of the edit operation, or `None` if the edit
    /// was a no-op (e.g. empty text).
    pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
    where
        T: Into<Arc<str>>,
    {
        self.edit([(self.len()..self.len(), text)], None, cx)
    }
2494
    /// Applies the given edits to the buffer. Each edit is specified as a range of text to
    /// delete, and a string of text to insert at that location. Adjacent edits are coalesced.
    ///
    /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
    /// request for the edited ranges, which will be processed when the buffer finishes
    /// parsing.
    ///
    /// Parsing takes place at the end of a transaction, and may compute synchronously
    /// or asynchronously, depending on the changes.
    ///
    /// Returns the timestamp of the edit operation, or `None` when all edits
    /// were empty and nothing changed.
    pub fn edit<I, S, T>(
        &mut self,
        edits_iter: I,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) -> Option<clock::Lamport>
    where
        I: IntoIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        self.edit_internal(edits_iter, autoindent_mode, true, cx)
    }
2517
    /// Like [`edit`](Self::edit), but does not coalesce adjacent edits.
    ///
    /// Overlapping edits are still merged; only edits that merely touch at a
    /// boundary are kept separate.
    pub fn edit_non_coalesce<I, S, T>(
        &mut self,
        edits_iter: I,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) -> Option<clock::Lamport>
    where
        I: IntoIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        self.edit_internal(edits_iter, autoindent_mode, false, cx)
    }
2532
    /// Shared implementation of [`Self::edit`] and [`Self::edit_non_coalesce`].
    ///
    /// Normalizes and (optionally) coalesces the edits, applies them to the
    /// underlying text, and enqueues an autoindent request when an
    /// [`AutoindentMode`] is provided and the buffer has a language.
    fn edit_internal<I, S, T>(
        &mut self,
        edits_iter: I,
        autoindent_mode: Option<AutoindentMode>,
        coalesce_adjacent: bool,
        cx: &mut Context<Self>,
    ) -> Option<clock::Lamport>
    where
        I: IntoIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        // Skip invalid edits and coalesce contiguous ones.
        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();

        for (range, new_text) in edits_iter {
            let mut range = range.start.to_offset(self)..range.end.to_offset(self);

            // Tolerate reversed ranges by swapping the endpoints.
            if range.start > range.end {
                mem::swap(&mut range.start, &mut range.end);
            }
            let new_text = new_text.into();
            if !new_text.is_empty() || !range.is_empty() {
                let prev_edit = edits.last_mut();
                // When coalescing, merge edits that merely touch (`>=`);
                // otherwise only merge edits that actually overlap (`>`).
                let should_coalesce = prev_edit.as_ref().is_some_and(|(prev_range, _)| {
                    if coalesce_adjacent {
                        prev_range.end >= range.start
                    } else {
                        prev_range.end > range.start
                    }
                });

                if let Some((prev_range, prev_text)) = prev_edit
                    && should_coalesce
                {
                    prev_range.end = cmp::max(prev_range.end, range.end);
                    *prev_text = format!("{prev_text}{new_text}").into();
                } else {
                    edits.push((range, new_text));
                }
            }
        }
        if edits.is_empty() {
            return None;
        }

        self.start_transaction();
        self.pending_autoindent.take();
        // Only request autoindent when the buffer has a language; capture a
        // pre-edit snapshot so indentation can be computed relative to it.
        let autoindent_request = autoindent_mode
            .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));

        let edit_operation = self.text.edit(edits.iter().cloned());
        let edit_id = edit_operation.timestamp();

        if let Some((before_edit, mode)) = autoindent_request {
            // `delta` tracks how earlier edits shift the offsets of later ones.
            let mut delta = 0isize;
            // Caches the per-language "is syntax-aware autoindent enabled"
            // answer for consecutive edits in the same language.
            let mut previous_setting = None;
            let entries: Vec<_> = edits
                .into_iter()
                .enumerate()
                .zip(&edit_operation.as_edit().unwrap().new_text)
                .filter(|((_, (range, _)), _)| {
                    let language = before_edit.language_at(range.start);
                    let language_id = language.map(|l| l.id());
                    if let Some((cached_language_id, apply_syntax_indent)) = previous_setting
                        && cached_language_id == language_id
                    {
                        apply_syntax_indent
                    } else {
                        // The auto-indent setting is not present in editorconfigs, hence
                        // we can avoid passing the file here.
                        let auto_indent_mode = LanguageSettings::resolve(
                            None,
                            language.map(|l| l.name()).as_ref(),
                            cx,
                        )
                        .auto_indent;
                        let apply_syntax_indent = auto_indent_mode == AutoIndentMode::SyntaxAware;
                        previous_setting = Some((language_id, apply_syntax_indent));
                        apply_syntax_indent
                    }
                })
                .map(|((ix, (range, _)), new_text)| {
                    let new_text_length = new_text.len();
                    let old_start = range.start.to_point(&before_edit);
                    let new_start = (delta + range.start as isize) as usize;
                    let range_len = range.end - range.start;
                    delta += new_text_length as isize - range_len as isize;

                    // Decide what range of the insertion to auto-indent, and whether
                    // the first line of the insertion should be considered a newly-inserted line
                    // or an edit to an existing line.
                    let mut range_of_insertion_to_indent = 0..new_text_length;
                    let mut first_line_is_new = true;

                    let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
                    let old_line_end = before_edit.line_len(old_start.row);

                    // Editing after the line's indentation means we're
                    // modifying an existing line, not creating a new one.
                    if old_start.column > old_line_start {
                        first_line_is_new = false;
                    }

                    if !new_text.contains('\n')
                        && (old_start.column + (range_len as u32) < old_line_end
                            || old_line_end == old_line_start)
                    {
                        first_line_is_new = false;
                    }

                    // When inserting text starting with a newline, avoid auto-indenting the
                    // previous line.
                    if new_text.starts_with('\n') {
                        range_of_insertion_to_indent.start += 1;
                        first_line_is_new = true;
                    }

                    let mut original_indent_column = None;
                    if let AutoindentMode::Block {
                        original_indent_columns,
                    } = &mode
                    {
                        original_indent_column = Some(if new_text.starts_with('\n') {
                            indent_size_for_text(
                                new_text[range_of_insertion_to_indent.clone()].chars(),
                            )
                            .len
                        } else {
                            original_indent_columns
                                .get(ix)
                                .copied()
                                .flatten()
                                .unwrap_or_else(|| {
                                    indent_size_for_text(
                                        new_text[range_of_insertion_to_indent.clone()].chars(),
                                    )
                                    .len
                                })
                        });

                        // Avoid auto-indenting the line after the edit.
                        if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
                            range_of_insertion_to_indent.end -= 1;
                        }
                    }

                    AutoindentRequestEntry {
                        original_indent_column,
                        old_row: if first_line_is_new {
                            None
                        } else {
                            Some(old_start.row)
                        },
                        indent_size: before_edit.language_indent_size_at(range.start, cx),
                        range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
                            ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
                    }
                })
                .collect();

            if !entries.is_empty() {
                self.autoindent_requests.push(Arc::new(AutoindentRequest {
                    before_edit,
                    entries,
                    is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
                    ignore_empty_lines: false,
                }));
            }
        }

        self.end_transaction(cx);
        self.send_operation(Operation::Buffer(edit_operation), true, cx);
        Some(edit_id)
    }
2706
2707 fn did_edit(
2708 &mut self,
2709 old_version: &clock::Global,
2710 was_dirty: bool,
2711 is_local: bool,
2712 cx: &mut Context<Self>,
2713 ) {
2714 self.was_changed();
2715
2716 if self.edits_since::<usize>(old_version).next().is_none() {
2717 return;
2718 }
2719
2720 self.reparse(cx, true);
2721 cx.emit(BufferEvent::Edited { is_local });
2722 let is_dirty = self.is_dirty();
2723 if was_dirty != is_dirty {
2724 cx.emit(BufferEvent::DirtyChanged);
2725 }
2726 if was_dirty && !is_dirty {
2727 if let Some(file) = self.file.as_ref() {
2728 if matches!(file.disk_state(), DiskState::Present { .. })
2729 && file.disk_state().mtime() != self.saved_mtime
2730 {
2731 cx.emit(BufferEvent::ReloadNeeded);
2732 }
2733 }
2734 }
2735 cx.notify();
2736 }
2737
2738 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2739 where
2740 I: IntoIterator<Item = Range<T>>,
2741 T: ToOffset + Copy,
2742 {
2743 let before_edit = self.snapshot();
2744 let entries = ranges
2745 .into_iter()
2746 .map(|range| AutoindentRequestEntry {
2747 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2748 old_row: None,
2749 indent_size: before_edit.language_indent_size_at(range.start, cx),
2750 original_indent_column: None,
2751 })
2752 .collect();
2753 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2754 before_edit,
2755 entries,
2756 is_block_mode: false,
2757 ignore_empty_lines: true,
2758 }));
2759 self.request_autoindent(cx, Some(Duration::from_micros(300)));
2760 }
2761
    /// Inserts newlines at the given position to create an empty line, returning the start of
    /// the new line. You can also request the insertion of empty lines above and below the line
    /// starting at the returned point.
    pub fn insert_empty_line(
        &mut self,
        position: impl ToPoint,
        space_above: bool,
        space_below: bool,
        cx: &mut Context<Self>,
    ) -> Point {
        let mut position = position.to_point(self);

        self.start_transaction();

        // Break the line at `position`.
        self.edit(
            [(position..position, "\n")],
            Some(AutoindentMode::EachLine),
            cx,
        );

        // If we split mid-line, the new line begins one row below.
        if position.column > 0 {
            position += Point::new(1, 0);
        }

        // Ensure the target line itself is blank.
        if !self.is_line_blank(position.row) {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
        }

        // Optionally ensure a blank line above the inserted line…
        if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
            position.row += 1;
        }

        // …and below it.
        if space_below
            && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
        {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
        }

        self.end_transaction(cx);

        position
    }
2816
    /// Applies the given remote operations to the buffer.
    ///
    /// Text operations are applied in bulk by the underlying text buffer;
    /// other operations are applied immediately when their anchors can be
    /// resolved, or deferred otherwise.
    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
        self.pending_autoindent.take();
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        let mut deferred_ops = Vec::new();
        // Split incoming ops: text ops are collected for bulk application,
        // everything else is applied or deferred on the spot.
        let buffer_ops = ops
            .into_iter()
            .filter_map(|op| match op {
                Operation::Buffer(op) => Some(op),
                _ => {
                    if self.can_apply_op(&op) {
                        self.apply_op(op, cx);
                    } else {
                        deferred_ops.push(op);
                    }
                    None
                }
            })
            .collect::<Vec<_>>();
        // Re-broadcast the text operations (as non-local) before applying them.
        for operation in buffer_ops.iter() {
            self.send_operation(Operation::Buffer(operation.clone()), false, cx);
        }
        self.text.apply_ops(buffer_ops);
        self.deferred_ops.insert(deferred_ops);
        self.flush_deferred_ops(cx);
        self.did_edit(&old_version, was_dirty, false, cx);
        // Notify independently of whether the buffer was edited as the operations could include a
        // selection update.
        cx.notify();
    }
2848
2849 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2850 let mut deferred_ops = Vec::new();
2851 for op in self.deferred_ops.drain().iter().cloned() {
2852 if self.can_apply_op(&op) {
2853 self.apply_op(op, cx);
2854 } else {
2855 deferred_ops.push(op);
2856 }
2857 }
2858 self.deferred_ops.insert(deferred_ops);
2859 }
2860
2861 pub fn has_deferred_ops(&self) -> bool {
2862 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2863 }
2864
    /// Returns whether a non-text operation can be applied right now, i.e.
    /// whether all anchors it references can be resolved against the current
    /// text. Operations failing this check are deferred.
    fn can_apply_op(&self, operation: &Operation) -> bool {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics {
                diagnostics: diagnostic_set,
                ..
            } => diagnostic_set.iter().all(|diagnostic| {
                self.text.can_resolve(&diagnostic.range.start)
                    && self.text.can_resolve(&diagnostic.range.end)
            }),
            Operation::UpdateSelections { selections, .. } => selections
                .iter()
                .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
            // These carry no anchors, so they can always be applied.
            Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
        }
    }
2883
    /// Applies a single non-text operation that has already been verified as
    /// applicable via [`Self::can_apply_op`].
    fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics {
                server_id,
                diagnostics: diagnostic_set,
                lamport_timestamp,
            } => {
                let snapshot = self.snapshot();
                self.apply_diagnostic_update(
                    server_id,
                    DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
                    lamport_timestamp,
                    cx,
                );
            }
            Operation::UpdateSelections {
                selections,
                lamport_timestamp,
                line_mode,
                cursor_shape,
            } => {
                // Ignore the update if we already hold a newer selection set
                // for this replica.
                if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
                    && set.lamport_timestamp > lamport_timestamp
                {
                    return;
                }

                self.remote_selections.insert(
                    lamport_timestamp.replica_id,
                    SelectionSet {
                        selections,
                        lamport_timestamp,
                        line_mode,
                        cursor_shape,
                    },
                );
                self.text.lamport_clock.observe(lamport_timestamp);
                self.non_text_state_update_count += 1;
            }
            Operation::UpdateCompletionTriggers {
                triggers,
                lamport_timestamp,
                server_id,
            } => {
                if triggers.is_empty() {
                    // An empty trigger list removes this server's entry;
                    // rebuild the merged list from the remaining servers.
                    self.completion_triggers_per_language_server
                        .remove(&server_id);
                    self.completion_triggers = self
                        .completion_triggers_per_language_server
                        .values()
                        .flat_map(|triggers| triggers.iter().cloned())
                        .collect();
                } else {
                    self.completion_triggers_per_language_server
                        .insert(server_id, triggers.iter().cloned().collect());
                    self.completion_triggers.extend(triggers);
                }
                self.text.lamport_clock.observe(lamport_timestamp);
            }
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            } => {
                self.text.set_line_ending(line_ending);
                self.text.lamport_clock.observe(lamport_timestamp);
            }
        }
    }
2955
2956 fn apply_diagnostic_update(
2957 &mut self,
2958 server_id: LanguageServerId,
2959 diagnostics: DiagnosticSet,
2960 lamport_timestamp: clock::Lamport,
2961 cx: &mut Context<Self>,
2962 ) {
2963 if lamport_timestamp > self.diagnostics_timestamp {
2964 if diagnostics.is_empty() {
2965 self.diagnostics.remove(&server_id);
2966 } else {
2967 self.diagnostics.insert(server_id, diagnostics);
2968 }
2969 self.diagnostics_timestamp = lamport_timestamp;
2970 self.non_text_state_update_count += 1;
2971 self.text.lamport_clock.observe(lamport_timestamp);
2972 cx.notify();
2973 cx.emit(BufferEvent::DiagnosticsUpdated);
2974 }
2975 }
2976
    /// Broadcasts `operation` to interested parties (collaboration,
    /// persistence) by emitting a [`BufferEvent::Operation`] event.
    fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
        // Mark the buffer as changed before observers react to the operation.
        self.was_changed();
        cx.emit(BufferEvent::Operation {
            operation,
            is_local,
        });
    }
2984
    /// Removes the remote selections for a given peer, e.g. when that
    /// collaborator leaves the buffer.
    pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
        self.remote_selections.remove(&replica_id);
        cx.notify();
    }
2990
2991 /// Undoes the most recent transaction.
2992 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2993 let was_dirty = self.is_dirty();
2994 let old_version = self.version.clone();
2995
2996 if let Some((transaction_id, operation)) = self.text.undo() {
2997 self.send_operation(Operation::Buffer(operation), true, cx);
2998 self.did_edit(&old_version, was_dirty, true, cx);
2999 self.restore_encoding_for_transaction(transaction_id, was_dirty);
3000 Some(transaction_id)
3001 } else {
3002 None
3003 }
3004 }
3005
3006 /// Manually undoes a specific transaction in the buffer's undo history.
3007 pub fn undo_transaction(
3008 &mut self,
3009 transaction_id: TransactionId,
3010 cx: &mut Context<Self>,
3011 ) -> bool {
3012 let was_dirty = self.is_dirty();
3013 let old_version = self.version.clone();
3014 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3015 self.send_operation(Operation::Buffer(operation), true, cx);
3016 self.did_edit(&old_version, was_dirty, true, cx);
3017 true
3018 } else {
3019 false
3020 }
3021 }
3022
3023 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3024 pub fn undo_to_transaction(
3025 &mut self,
3026 transaction_id: TransactionId,
3027 cx: &mut Context<Self>,
3028 ) -> bool {
3029 let was_dirty = self.is_dirty();
3030 let old_version = self.version.clone();
3031
3032 let operations = self.text.undo_to_transaction(transaction_id);
3033 let undone = !operations.is_empty();
3034 for operation in operations {
3035 self.send_operation(Operation::Buffer(operation), true, cx);
3036 }
3037 if undone {
3038 self.did_edit(&old_version, was_dirty, true, cx)
3039 }
3040 undone
3041 }
3042
3043 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3044 let was_dirty = self.is_dirty();
3045 let operation = self.text.undo_operations(counts);
3046 let old_version = self.version.clone();
3047 self.send_operation(Operation::Buffer(operation), true, cx);
3048 self.did_edit(&old_version, was_dirty, true, cx);
3049 }
3050
3051 /// Manually redoes a specific transaction in the buffer's redo history.
3052 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3053 let was_dirty = self.is_dirty();
3054 let old_version = self.version.clone();
3055
3056 if let Some((transaction_id, operation)) = self.text.redo() {
3057 self.send_operation(Operation::Buffer(operation), true, cx);
3058 self.did_edit(&old_version, was_dirty, true, cx);
3059 self.restore_encoding_for_transaction(transaction_id, was_dirty);
3060 Some(transaction_id)
3061 } else {
3062 None
3063 }
3064 }
3065
3066 fn restore_encoding_for_transaction(&mut self, transaction_id: TransactionId, was_dirty: bool) {
3067 if let Some((old_encoding, old_has_bom)) =
3068 self.reload_with_encoding_txns.get(&transaction_id)
3069 {
3070 let current_encoding = self.encoding;
3071 let current_has_bom = self.has_bom;
3072 self.encoding = *old_encoding;
3073 self.has_bom = *old_has_bom;
3074 if !was_dirty {
3075 self.saved_version = self.version.clone();
3076 self.has_unsaved_edits
3077 .set((self.saved_version.clone(), false));
3078 }
3079 self.reload_with_encoding_txns
3080 .insert(transaction_id, (current_encoding, current_has_bom));
3081 }
3082 }
3083
3084 /// Manually undoes all changes until a given transaction in the buffer's redo history.
3085 pub fn redo_to_transaction(
3086 &mut self,
3087 transaction_id: TransactionId,
3088 cx: &mut Context<Self>,
3089 ) -> bool {
3090 let was_dirty = self.is_dirty();
3091 let old_version = self.version.clone();
3092
3093 let operations = self.text.redo_to_transaction(transaction_id);
3094 let redone = !operations.is_empty();
3095 for operation in operations {
3096 self.send_operation(Operation::Buffer(operation), true, cx);
3097 }
3098 if redone {
3099 self.did_edit(&old_version, was_dirty, true, cx)
3100 }
3101 redone
3102 }
3103
3104 /// Override current completion triggers with the user-provided completion triggers.
3105 pub fn set_completion_triggers(
3106 &mut self,
3107 server_id: LanguageServerId,
3108 triggers: BTreeSet<String>,
3109 cx: &mut Context<Self>,
3110 ) {
3111 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3112 if triggers.is_empty() {
3113 self.completion_triggers_per_language_server
3114 .remove(&server_id);
3115 self.completion_triggers = self
3116 .completion_triggers_per_language_server
3117 .values()
3118 .flat_map(|triggers| triggers.iter().cloned())
3119 .collect();
3120 } else {
3121 self.completion_triggers_per_language_server
3122 .insert(server_id, triggers.clone());
3123 self.completion_triggers.extend(triggers.iter().cloned());
3124 }
3125 self.send_operation(
3126 Operation::UpdateCompletionTriggers {
3127 triggers: triggers.into_iter().collect(),
3128 lamport_timestamp: self.completion_triggers_timestamp,
3129 server_id,
3130 },
3131 true,
3132 cx,
3133 );
3134 cx.notify();
3135 }
3136
    /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
    /// The returned set is the union of the triggers registered by all language servers.
    pub fn completion_triggers(&self) -> &BTreeSet<String> {
        &self.completion_triggers
    }
3142
    /// Call this directly after performing edits to prevent the preview tab
    /// from being dismissed by those edits. It causes `should_dismiss_preview`
    /// to return false until there are additional edits.
    pub fn refresh_preview(&mut self) {
        // Snapshot the current version; `preserve_preview` compares against it.
        self.preview_version = self.version.clone();
    }
3149
    /// Whether we should preserve the preview status of a tab containing this buffer.
    /// True as long as no edits occurred since the last call to `refresh_preview`.
    pub fn preserve_preview(&self) -> bool {
        !self.has_edits_since(&self.preview_version)
    }
3154
    /// Sets the time window within which consecutive edits are grouped into a
    /// single undo transaction.
    pub fn set_group_interval(&mut self, group_interval: Duration) {
        self.text.set_group_interval(group_interval);
    }
3158}
3159
#[doc(hidden)]
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Applies the edits described by a marked-text fixture string,
    /// optionally auto-indenting the result. Test-support only.
    pub fn edit_via_marked_text(
        &mut self,
        marked_string: &str,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) {
        let edits = self.edits_for_marked_text(marked_string);
        self.edit(edits, autoindent_mode, cx);
    }

    /// Performs up to `old_range_count` random, non-overlapping edits for
    /// randomized testing. The resulting edits depend on the exact order of
    /// RNG calls below, so reordering them would break test reproducibility.
    pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
    where
        T: rand::Rng,
    {
        let mut edits: Vec<(Range<usize>, String)> = Vec::new();
        let mut last_end = None;
        for _ in 0..old_range_count {
            // Stop once the previous edit reached the end of the buffer.
            if last_end.is_some_and(|last_end| last_end >= self.len()) {
                break;
            }

            // Start strictly after the previous range so edits never overlap.
            let new_start = last_end.map_or(0, |last_end| last_end + 1);
            let mut range = self.random_byte_range(new_start, rng);
            // Occasionally produce a reversed range to exercise normalization.
            if rng.random_bool(0.2) {
                mem::swap(&mut range.start, &mut range.end);
            }
            last_end = Some(range.end);

            let new_text_len = rng.random_range(0..10);
            let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
            // Uppercase makes inserted text easy to spot in test logs.
            new_text = new_text.to_uppercase();

            edits.push((range, new_text));
        }
        log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
        self.edit(edits, None, cx);
    }

    /// Undoes or redoes a random set of operations for randomized testing.
    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();

        let ops = self.text.randomly_undo_redo(rng);
        if !ops.is_empty() {
            for op in ops {
                self.send_operation(Operation::Buffer(op), true, cx);
                // NOTE(review): `did_edit` runs once per operation here, unlike
                // `undo_to_transaction` which calls it once after the loop —
                // presumably acceptable for test-only code; confirm if event
                // counts ever matter to a test.
                self.did_edit(&old_version, was_dirty, true, cx);
            }
        }
    }
}
3214
// Marker impl: allows `Buffer` entities to emit `BufferEvent`s through GPUI.
impl EventEmitter<BufferEvent> for Buffer {}
3216
3217fn offset_in_sub_ranges(
3218 sub_ranges: &[Range<Anchor>],
3219 offset: usize,
3220 snapshot: &TextBufferSnapshot,
3221) -> bool {
3222 let start_anchor = snapshot.anchor_before(offset);
3223 let end_anchor = snapshot.anchor_after(offset);
3224
3225 sub_ranges.iter().any(|sub_range| {
3226 let is_before_start = sub_range.end.cmp(&start_anchor, snapshot).is_lt();
3227 let is_after_end = sub_range.start.cmp(&end_anchor, snapshot).is_gt();
3228 !is_before_start && !is_after_end
3229 })
3230}
3231
/// `Buffer` transparently exposes the read-only API of its underlying
/// `TextBuffer` (length, version, coordinate conversion, etc.) via deref.
impl Deref for Buffer {
    type Target = TextBuffer;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
3239
3240impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        // Delegates to the free function that inspects the line's leading whitespace.
        indent_size_for_line(self, row)
    }
3246
3247 /// Returns [`IndentSize`] for a given position that respects user settings
3248 /// and language preferences.
3249 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3250 let settings = self.settings_at(position, cx);
3251 if settings.hard_tabs {
3252 IndentSize::tab()
3253 } else {
3254 IndentSize::spaces(settings.tab_size.get())
3255 }
3256 }
3257
3258 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3259 /// is passed in as `single_indent_size`.
3260 pub fn suggested_indents(
3261 &self,
3262 rows: impl Iterator<Item = u32>,
3263 single_indent_size: IndentSize,
3264 ) -> BTreeMap<u32, IndentSize> {
3265 let mut result = BTreeMap::new();
3266
3267 for row_range in contiguous_ranges(rows, 10) {
3268 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3269 Some(suggestions) => suggestions,
3270 _ => break,
3271 };
3272
3273 for (row, suggestion) in row_range.zip(suggestions) {
3274 let indent_size = if let Some(suggestion) = suggestion {
3275 result
3276 .get(&suggestion.basis_row)
3277 .copied()
3278 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3279 .with_delta(suggestion.delta, single_indent_size)
3280 } else {
3281 self.indent_size_for_line(row)
3282 };
3283
3284 result.insert(row, indent_size);
3285 }
3286 }
3287
3288 result
3289 }
3290
    /// Produces an [`IndentSuggestion`] for each row in `row_range`, combining
    /// tree-sitter indent queries (indent/start/end/outdent captures) with the
    /// language's regex-based increase/decrease indent rules.
    ///
    /// Returns `None` when the buffer has no primary language.
    fn suggest_autoindents(
        &self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
        let config = &self.language.as_ref()?.config;
        let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

        // A suffixed `@start` capture, recorded so later rows can outdent back
        // to the row where the matching construct began.
        #[derive(Debug, Clone)]
        struct StartPosition {
            start: Point,
            suffix: SharedString,
            language: Arc<Language>,
        }

        // Find the suggested indentation ranges based on the syntax tree.
        let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
        let end = Point::new(row_range.end, 0);
        let range = (start..end).to_offset(&self.text);
        let mut matches = self.syntax.matches_with_options(
            range.clone(),
            &self.text,
            TreeSitterOptions {
                max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
                max_start_depth: None,
            },
            |grammar| Some(&grammar.indents_config.as_ref()?.query),
        );
        // Unwrap is safe: only grammars with an indent config produce matches here.
        let indent_configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.indents_config.as_ref().unwrap())
            .collect::<Vec<_>>();

        let mut indent_ranges = Vec::<Range<Point>>::new();
        let mut start_positions = Vec::<StartPosition>::new();
        let mut outdent_positions = Vec::<Point>::new();
        while let Some(mat) = matches.peek() {
            let mut start: Option<Point> = None;
            let mut end: Option<Point> = None;

            let config = indent_configs[mat.grammar_index];
            for capture in mat.captures {
                if capture.index == config.indent_capture_ix {
                    // `@indent` spans the whole indented region by default;
                    // `@start`/`@end` captures below may override the edges.
                    start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                    end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.start_capture_ix {
                    start = Some(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.end_capture_ix {
                    end = Some(Point::from_ts_point(capture.node.start_position()));
                } else if Some(capture.index) == config.outdent_capture_ix {
                    outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
                } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
                    start_positions.push(StartPosition {
                        start: Point::from_ts_point(capture.node.start_position()),
                        suffix: suffix.clone(),
                        language: mat.language.clone(),
                    });
                }
            }

            matches.advance();
            if let Some((start, end)) = start.zip(end) {
                // Single-row ranges cannot influence the indentation of later rows.
                if start.row == end.row {
                    continue;
                }
                let range = start..end;
                // Keep `indent_ranges` sorted by start; merge ranges sharing a start.
                match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
                    Err(ix) => indent_ranges.insert(ix, range),
                    Ok(ix) => {
                        let prev_range = &mut indent_ranges[ix];
                        prev_range.end = prev_range.end.max(range.end);
                    }
                }
            }
        }

        // Collect syntax-error ranges so suggestions inside them can be flagged.
        let mut error_ranges = Vec::<Range<Point>>::new();
        let mut matches = self
            .syntax
            .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
        while let Some(mat) = matches.peek() {
            let node = mat.captures[0].node;
            let start = Point::from_ts_point(node.start_position());
            let end = Point::from_ts_point(node.end_position());
            let range = start..end;
            let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
                Ok(ix) | Err(ix) => ix,
            };
            // Replace any existing error ranges that this one subsumes.
            let mut end_ix = ix;
            while let Some(existing_range) = error_ranges.get(end_ix) {
                if existing_range.end < end {
                    end_ix += 1;
                } else {
                    break;
                }
            }
            error_ranges.splice(ix..end_ix, [range]);
            matches.advance();
        }

        outdent_positions.sort();
        for outdent_position in outdent_positions {
            // find the innermost indent range containing this outdent_position
            // set its end to the outdent position
            if let Some(range_to_truncate) = indent_ranges
                .iter_mut()
                .rfind(|indent_range| indent_range.contains(&outdent_position))
            {
                range_to_truncate.end = outdent_position;
            }
        }

        start_positions.sort_by_key(|b| b.start);

        // Find the suggested indentation increases and decreases based on regexes.
        let mut regex_outdent_map = HashMap::default();
        let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
        let mut start_positions_iter = start_positions.iter().peekable();

        let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
        self.for_each_line(
            Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
                ..Point::new(row_range.end, 0),
            |row, line| {
                let indent_len = self.indent_size_for_line(row).len;
                let row_language = self.language_at(Point::new(row, indent_len)).cloned();
                let row_language_config = row_language
                    .as_ref()
                    .map(|lang| lang.config())
                    .unwrap_or(config);

                // A decrease pattern outdents this row; an increase pattern
                // indents the row *after* this one.
                if row_language_config
                    .decrease_indent_pattern
                    .as_ref()
                    .is_some_and(|regex| regex.is_match(line))
                {
                    indent_change_rows.push((row, Ordering::Less));
                }
                if row_language_config
                    .increase_indent_pattern
                    .as_ref()
                    .is_some_and(|regex| regex.is_match(line))
                {
                    indent_change_rows.push((row + 1, Ordering::Greater));
                }
                // Bucket suffixed start positions from rows above the current one.
                while let Some(pos) = start_positions_iter.peek() {
                    if pos.start.row < row {
                        let pos = start_positions_iter.next().unwrap().clone();
                        last_seen_suffix
                            .entry(pos.suffix.to_string())
                            .or_default()
                            .push(pos);
                    } else {
                        break;
                    }
                }
                for rule in &row_language_config.decrease_indent_patterns {
                    if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
                        let row_start_column = self.indent_size_for_line(row).len;
                        // Outdent back to the most recent matching start position
                        // in the same language that is not indented deeper than us.
                        let basis_row = rule
                            .valid_after
                            .iter()
                            .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
                            .flatten()
                            .filter(|pos| {
                                row_language
                                    .as_ref()
                                    .or(self.language.as_ref())
                                    .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
                            })
                            .filter(|pos| pos.start.column <= row_start_column)
                            .max_by_key(|pos| pos.start.row);
                        if let Some(outdent_to) = basis_row {
                            regex_outdent_map.insert(row, outdent_to.start.row);
                        }
                        break;
                    }
                }
            },
        );

        let mut indent_changes = indent_change_rows.into_iter().peekable();
        let mut prev_row = if config.auto_indent_using_last_non_empty_line {
            prev_non_blank_row.unwrap_or(0)
        } else {
            row_range.start.saturating_sub(1)
        };

        let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
        Some(row_range.map(move |row| {
            let row_start = Point::new(row, self.indent_size_for_line(row).len);

            let mut indent_from_prev_row = false;
            let mut outdent_from_prev_row = false;
            let mut outdent_to_row = u32::MAX;
            let mut from_regex = false;

            // Drain regex-based indent changes up to and including this row.
            while let Some((indent_row, delta)) = indent_changes.peek() {
                match indent_row.cmp(&row) {
                    Ordering::Equal => match delta {
                        Ordering::Less => {
                            from_regex = true;
                            outdent_from_prev_row = true
                        }
                        Ordering::Greater => {
                            indent_from_prev_row = true;
                            from_regex = true
                        }
                        _ => {}
                    },

                    Ordering::Greater => break,
                    Ordering::Less => {}
                }

                indent_changes.next();
            }

            // Apply tree-sitter indent ranges that begin before this row.
            for range in &indent_ranges {
                if range.start.row >= row {
                    break;
                }
                if range.start.row == prev_row && range.end > row_start {
                    indent_from_prev_row = true;
                }
                if range.end > prev_row_start && range.end <= row_start {
                    outdent_to_row = outdent_to_row.min(range.start.row);
                }
            }

            // Regex-based outdent rules take precedence over tree-sitter indents.
            if let Some(basis_row) = regex_outdent_map.get(&row) {
                indent_from_prev_row = false;
                outdent_to_row = *basis_row;
                from_regex = true;
            }

            let within_error = error_ranges
                .iter()
                .any(|e| e.start.row < row && e.end > row_start);

            // Combine the signals: indent and outdent cancel out; otherwise prefer
            // indenting, then outdenting, then keeping the previous row's level.
            let suggestion = if outdent_to_row == prev_row
                || (outdent_from_prev_row && indent_from_prev_row)
            {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else if indent_from_prev_row {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Greater,
                    within_error: within_error && !from_regex,
                })
            } else if outdent_to_row < prev_row {
                Some(IndentSuggestion {
                    basis_row: outdent_to_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else if outdent_from_prev_row {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Less,
                    within_error: within_error && !from_regex,
                })
            } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
            {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else {
                None
            };

            prev_row = row;
            prev_row_start = row_start;
            suggestion
        }))
    }
3573
3574 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3575 while row > 0 {
3576 row -= 1;
3577 if !self.is_line_blank(row) {
3578 return Some(row);
3579 }
3580 }
3581 None
3582 }
3583
    /// Runs the tree-sitter query selected by `query` over all syntax layers
    /// intersecting `range`, returning the resulting capture iterator.
    pub fn captures(
        &self,
        range: Range<usize>,
        query: fn(&Grammar) -> Option<&tree_sitter::Query>,
    ) -> SyntaxMapCaptures<'_> {
        self.syntax.captures(range, &self.text, query)
    }
3591
    /// Collects syntax-highlighting captures for `range`, along with one
    /// [`HighlightMap`] per grammar so capture indices can be translated into
    /// theme highlight ids.
    #[ztracing::instrument(skip_all)]
    fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
        let captures = self.syntax.captures(range, &self.text, |grammar| {
            grammar
                .highlights_config
                .as_ref()
                .map(|config| &config.query)
        });
        // Collected in the same order as `captures.grammars()`, so a capture's
        // grammar index can be used to index into `highlight_maps`.
        let highlight_maps = captures
            .grammars()
            .iter()
            .map(|grammar| grammar.highlight_map())
            .collect();
        (captures, highlight_maps)
    }
3607
3608 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3609 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3610 /// returned in chunks where each chunk has a single syntax highlighting style and
3611 /// diagnostic status.
3612 #[ztracing::instrument(skip_all)]
3613 pub fn chunks<T: ToOffset>(
3614 &self,
3615 range: Range<T>,
3616 language_aware: LanguageAwareStyling,
3617 ) -> BufferChunks<'_> {
3618 let range = range.start.to_offset(self)..range.end.to_offset(self);
3619
3620 let mut syntax = None;
3621 if language_aware.tree_sitter {
3622 syntax = Some(self.get_highlights(range.clone()));
3623 }
3624 BufferChunks::new(
3625 self.text.as_rope(),
3626 range,
3627 syntax,
3628 language_aware.diagnostics,
3629 Some(self),
3630 )
3631 }
3632
    /// Produces a [`HighlightedText`] for the given range, applying syntax
    /// highlighting from the given theme plus an optional override style.
    pub fn highlighted_text_for_range<T: ToOffset>(
        &self,
        range: Range<T>,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> HighlightedText {
        HighlightedText::from_buffer_range(
            range,
            &self.text,
            &self.syntax,
            override_style,
            syntax_theme,
        )
    }
3647
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses callback to avoid allocating a string for each line.
    fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
        let mut line = String::new();
        let mut row = range.start.row;
        // The trailing "\n" sentinel guarantees the final (possibly
        // unterminated) line is still flushed through the callback.
        for chunk in self
            .as_rope()
            .chunks_in_range(range.to_offset(self))
            .chain(["\n"])
        {
            // A chunk may contain any number of newlines; each '\n' boundary
            // completes the line accumulated so far.
            for (newline_ix, text) in chunk.split('\n').enumerate() {
                if newline_ix > 0 {
                    callback(row, &line);
                    row += 1;
                    line.clear();
                }
                line.push_str(text);
            }
        }
    }
3668
    /// Iterates over every [`SyntaxLayer`] in the buffer, including hidden ones.
    pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
        self.syntax_layers_for_range(0..self.len(), true)
    }
3673
3674 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3675 let offset = position.to_offset(self);
3676 self.syntax_layers_for_range(offset..offset, false)
3677 .filter(|l| {
3678 if let Some(ranges) = l.included_sub_ranges {
3679 ranges.iter().any(|range| {
3680 let start = range.start.to_offset(self);
3681 start <= offset && {
3682 let end = range.end.to_offset(self);
3683 offset < end
3684 }
3685 })
3686 } else {
3687 l.node().start_byte() <= offset && l.node().end_byte() > offset
3688 }
3689 })
3690 .last()
3691 }
3692
    /// Iterates over the [`SyntaxLayer`]s intersecting the given range,
    /// optionally including hidden layers.
    pub fn syntax_layers_for_range<D: ToOffset>(
        &self,
        range: Range<D>,
        include_hidden: bool,
    ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
        self.syntax
            .layers_for_range(range, &self.text, include_hidden)
    }
3701
    /// Iterates over the languages of all syntax layers, including hidden ones.
    pub fn syntax_layers_languages(&self) -> impl Iterator<Item = &Arc<Language>> {
        self.syntax.languages(&self, true)
    }
3705
3706 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3707 &self,
3708 range: Range<D>,
3709 ) -> Option<SyntaxLayer<'_>> {
3710 let range = range.to_offset(self);
3711 self.syntax
3712 .layers_for_range(range, &self.text, false)
3713 .max_by(|a, b| {
3714 if a.depth != b.depth {
3715 a.depth.cmp(&b.depth)
3716 } else if a.offset.0 != b.offset.0 {
3717 a.offset.0.cmp(&b.offset.0)
3718 } else {
3719 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3720 }
3721 })
3722 }
3723
    /// Returns the [`ModelineSettings`] parsed from this buffer, if any.
    pub fn modeline(&self) -> Option<&Arc<ModelineSettings>> {
        self.modeline.as_ref()
    }
3728
    /// Returns the buffer's main (outermost) [`Language`], if one is assigned.
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }
3733
    /// Returns the [`Language`] at the given location, falling back to the
    /// buffer's main language when no syntax layer covers the position.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
        self.syntax_layer_at(position)
            .map(|info| info.language)
            .or(self.language.as_ref())
    }
3740
    /// Returns the settings for the language at the given location,
    /// resolved for this buffer (file, worktree, user overrides).
    pub fn settings_at<'a, D: ToOffset>(
        &'a self,
        position: D,
        cx: &'a App,
    ) -> Cow<'a, LanguageSettings> {
        LanguageSettings::for_buffer_snapshot(self, Some(position.to_offset(self)), cx)
    }
3749
    /// Returns a [`CharClassifier`] configured for the language scope at `point`.
    pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
        CharClassifier::new(self.language_scope_at(point))
    }
3753
    /// Returns the [`LanguageScope`] at the given location.
    ///
    /// When multiple syntax layers overlap the position (e.g. embedded
    /// languages), the layer whose node most tightly encloses the offset wins,
    /// with deeper layers taking precedence over shallower ones. Falls back to
    /// the buffer's main language if no layer matches.
    pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
        let offset = position.to_offset(self);
        let mut scope = None;
        let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
        let text: &TextBufferSnapshot = self;

        // Use the layer that has the smallest node intersecting the given point.
        for layer in self
            .syntax
            .layers_for_range(offset..offset, &self.text, false)
        {
            // Skip layers whose included sub-ranges don't cover this offset.
            if let Some(ranges) = layer.included_sub_ranges
                && !offset_in_sub_ranges(ranges, offset, text)
            {
                continue;
            }

            let mut cursor = layer.node().walk();

            // Descend to the smallest node in this layer containing the offset.
            let mut range = None;
            loop {
                let child_range = cursor.node().byte_range();
                if !child_range.contains(&offset) {
                    break;
                }

                range = Some(child_range);
                if cursor.goto_first_child_for_byte(offset).is_none() {
                    break;
                }
            }

            // Prefer deeper layers; within a depth, prefer the smaller node.
            if let Some(range) = range
                && smallest_range_and_depth.as_ref().is_none_or(
                    |(smallest_range, smallest_range_depth)| {
                        if layer.depth > *smallest_range_depth {
                            true
                        } else if layer.depth == *smallest_range_depth {
                            range.len() < smallest_range.len()
                        } else {
                            false
                        }
                    },
                )
            {
                smallest_range_and_depth = Some((range, layer.depth));
                scope = Some(LanguageScope {
                    language: layer.language.clone(),
                    override_id: layer.override_id(offset, &self.text),
                });
            }
        }

        // No layer matched: fall back to the buffer's main language.
        scope.or_else(|| {
            self.language.clone().map(|language| LanguageScope {
                language,
                override_id: None,
            })
        })
    }
3815
3816 /// Returns a tuple of the range and character kind of the word
3817 /// surrounding the given position.
3818 pub fn surrounding_word<T: ToOffset>(
3819 &self,
3820 start: T,
3821 scope_context: Option<CharScopeContext>,
3822 ) -> (Range<usize>, Option<CharKind>) {
3823 let mut start = start.to_offset(self);
3824 let mut end = start;
3825 let mut next_chars = self.chars_at(start).take(128).peekable();
3826 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3827
3828 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3829 let word_kind = cmp::max(
3830 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3831 next_chars.peek().copied().map(|c| classifier.kind(c)),
3832 );
3833
3834 for ch in prev_chars {
3835 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3836 start -= ch.len_utf8();
3837 } else {
3838 break;
3839 }
3840 }
3841
3842 for ch in next_chars {
3843 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3844 end += ch.len_utf8();
3845 } else {
3846 break;
3847 }
3848 }
3849
3850 (start..end, word_kind)
3851 }
3852
    /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
    /// range. When `require_larger` is true, the node found must be larger than the query range.
    ///
    /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
    /// be moved to the root of the tree.
    fn goto_node_enclosing_range(
        cursor: &mut tree_sitter::TreeCursor,
        query_range: &Range<usize>,
        require_larger: bool,
    ) -> bool {
        // Once we start moving back up the tree, the first enclosing node we
        // reach is the answer.
        let mut ascending = false;
        loop {
            let mut range = cursor.node().byte_range();
            if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
                if range.start > query_range.start {
                    cursor.goto_previous_sibling();
                    range = cursor.node().byte_range();
                }
            } else {
                // When the query range is non-empty and the current node ends exactly at the start,
                // move to the next sibling to find a node that extends beyond the start.
                if range.end == query_range.start {
                    cursor.goto_next_sibling();
                    range = cursor.node().byte_range();
                }
            }

            let encloses = range.contains_inclusive(query_range)
                && (!require_larger || range.len() > query_range.len());
            if !encloses {
                // Current node doesn't qualify: climb toward the root.
                ascending = true;
                if !cursor.goto_parent() {
                    return false;
                }
                continue;
            } else if ascending {
                return true;
            }

            // Descend into the current node.
            if cursor
                .goto_first_child_for_byte(query_range.start)
                .is_none()
            {
                return true;
            }
        }
    }
3903
    /// Returns the smallest syntax node, across all syntax layers, that
    /// encloses the given range and is strictly larger than it. For an empty
    /// range sitting between two nodes, the right-hand node is preferred when
    /// it is named (or the left one is not).
    pub fn syntax_ancestor<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;
        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that both contains the range and is larger than it.
            if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
                continue;
            }

            let left_node = cursor.node();
            let mut layer_result = left_node;

            // For an empty range, try to find another node immediately to the right of the range.
            if left_node.end_byte() == range.start {
                let mut right_node = None;
                // Climb until a next sibling exists.
                while !cursor.goto_next_sibling() {
                    if !cursor.goto_parent() {
                        break;
                    }
                }

                // Descend along the left edge of the right-hand subtree.
                while cursor.node().start_byte() == range.start {
                    right_node = Some(cursor.node());
                    if !cursor.goto_first_child() {
                        break;
                    }
                }

                // If there is a candidate node on both sides of the (empty) range, then
                // decide between the two by favoring a named node over an anonymous token.
                // If both nodes are the same in that regard, favor the right one.
                if let Some(right_node) = right_node
                    && (right_node.is_named() || !left_node.is_named())
                {
                    layer_result = right_node;
                }
            }

            // Across layers, keep the smallest enclosing node found so far.
            if let Some(previous_result) = &result
                && previous_result.byte_range().len() < layer_result.byte_range().len()
            {
                continue;
            }
            result = Some(layer_result);
        }

        result
    }
3960
    /// Find the previous sibling syntax node at the given range.
    ///
    /// This function locates the syntax node that precedes the node containing
    /// the given range. It searches hierarchically by:
    /// 1. Finding the node that contains the given range
    /// 2. Looking for the previous sibling at the same tree level
    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
    ///
    /// Returns `None` if there is no previous sibling at any ancestor level.
    pub fn syntax_prev_sibling<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;

        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that contains the range
            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
                continue;
            }

            // Look for the previous sibling, moving up ancestor levels if needed
            loop {
                if cursor.goto_previous_sibling() {
                    let layer_result = cursor.node();

                    // Across layers, prefer the sibling that ends earliest.
                    if let Some(previous_result) = &result {
                        if previous_result.byte_range().end < layer_result.byte_range().end {
                            continue;
                        }
                    }
                    result = Some(layer_result);
                    break;
                }

                // No sibling found at this level, try moving up to parent
                if !cursor.goto_parent() {
                    break;
                }
            }
        }

        result
    }
4011
    /// Find the next sibling syntax node at the given range.
    ///
    /// This function locates the syntax node that follows the node containing
    /// the given range. It searches hierarchically by:
    /// 1. Finding the node that contains the given range
    /// 2. Looking for the next sibling at the same tree level
    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
    ///
    /// Returns `None` if there is no next sibling at any ancestor level.
    pub fn syntax_next_sibling<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;

        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that contains the range
            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
                continue;
            }

            // Look for the next sibling, moving up ancestor levels if needed
            loop {
                if cursor.goto_next_sibling() {
                    let layer_result = cursor.node();

                    // Across layers, prefer the sibling that starts latest
                    // (closest candidates from earlier layers are kept).
                    // NOTE(review): `continue` re-runs `goto_next_sibling`,
                    // advancing the cursor before retrying — confirm this is
                    // intended rather than `break`.
                    if let Some(previous_result) = &result {
                        if previous_result.byte_range().start > layer_result.byte_range().start {
                            continue;
                        }
                    }
                    result = Some(layer_result);
                    break;
                }

                // No sibling found at this level, try moving up to parent
                if !cursor.goto_parent() {
                    break;
                }
            }
        }

        result
    }
4062
    /// Returns the root syntax node within the given row.
    ///
    /// Starting from the topmost syntax layer at `position`, this descends to
    /// the leaf touching the position, then climbs back up as long as the
    /// ancestor still starts on the same row — yielding the outermost node
    /// that begins on that row.
    pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
        let start_offset = position.to_offset(self);

        let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;

        // Only the first (outermost) layer at this position is considered.
        let layer = self
            .syntax
            .layers_for_range(start_offset..start_offset, &self.text, true)
            .next()?;

        let mut cursor = layer.node().walk();

        // Descend to the first leaf that touches the start of the range.
        while cursor.goto_first_child_for_byte(start_offset).is_some() {
            // If the child ends exactly at the position, prefer its right
            // neighbor, which actually contains the position.
            if cursor.node().end_byte() == start_offset {
                cursor.goto_next_sibling();
            }
        }

        // Ascend to the root node within the same row.
        while cursor.goto_parent() {
            if cursor.node().start_position().row != row {
                break;
            }
        }

        Some(cursor.node())
    }
4092
4093 /// Returns the outline for the buffer.
4094 ///
4095 /// This method allows passing an optional [`SyntaxTheme`] to
4096 /// syntax-highlight the returned symbols.
4097 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4098 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4099 }
4100
4101 /// Returns all the symbols that contain the given position.
4102 ///
4103 /// This method allows passing an optional [`SyntaxTheme`] to
4104 /// syntax-highlight the returned symbols.
4105 pub fn symbols_containing<T: ToOffset>(
4106 &self,
4107 position: T,
4108 theme: Option<&SyntaxTheme>,
4109 ) -> Vec<OutlineItem<Anchor>> {
4110 let position = position.to_offset(self);
4111 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4112 let end = self.clip_offset(position + 1, Bias::Right);
4113 let mut items = self.outline_items_containing(start..end, false, theme);
4114 let mut prev_depth = None;
4115 items.retain(|item| {
4116 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4117 prev_depth = Some(item.depth);
4118 result
4119 });
4120 items
4121 }
4122
    /// Returns the point range of the first outline item (per the grammar's
    /// outline query) that intersects the given range, or `None` if no
    /// outline item touches it.
    pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
        let range = range.to_offset(self);
        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
            grammar.outline_config.as_ref().map(|c| &c.query)
        });
        // Safe to unwrap: only grammars with an outline config produced matches.
        let configs = matches
            .grammars()
            .iter()
            .map(|g| g.outline_config.as_ref().unwrap())
            .collect::<Vec<_>>();

        while let Some(mat) = matches.peek() {
            let config = &configs[mat.grammar_index];
            let containing_item_node = maybe!({
                // Locate the capture marking the whole outline item.
                let item_node = mat.captures.iter().find_map(|cap| {
                    if cap.index == config.item_capture_ix {
                        Some(cap.node)
                    } else {
                        None
                    }
                })?;

                // Discard items that lie entirely outside the query range.
                let item_byte_range = item_node.byte_range();
                if item_byte_range.end < range.start || item_byte_range.start > range.end {
                    None
                } else {
                    Some(item_node)
                }
            });

            if let Some(item_node) = containing_item_node {
                return Some(
                    Point::from_ts_point(item_node.start_position())
                        ..Point::from_ts_point(item_node.end_position()),
                );
            }

            matches.advance();
        }
        None
    }
4164
4165 pub fn outline_items_containing<T: ToOffset>(
4166 &self,
4167 range: Range<T>,
4168 include_extra_context: bool,
4169 theme: Option<&SyntaxTheme>,
4170 ) -> Vec<OutlineItem<Anchor>> {
4171 self.outline_items_containing_internal(
4172 range,
4173 include_extra_context,
4174 theme,
4175 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4176 )
4177 }
4178
4179 pub fn outline_items_as_points_containing<T: ToOffset>(
4180 &self,
4181 range: Range<T>,
4182 include_extra_context: bool,
4183 theme: Option<&SyntaxTheme>,
4184 ) -> Vec<OutlineItem<Point>> {
4185 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4186 range
4187 })
4188 }
4189
4190 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4191 &self,
4192 range: Range<T>,
4193 include_extra_context: bool,
4194 theme: Option<&SyntaxTheme>,
4195 ) -> Vec<OutlineItem<usize>> {
4196 self.outline_items_containing_internal(
4197 range,
4198 include_extra_context,
4199 theme,
4200 |buffer, range| range.to_offset(buffer),
4201 )
4202 }
4203
    /// Shared implementation for the `outline_items_*` methods.
    ///
    /// Runs the outline query over `range`, collects outline items and
    /// annotation (e.g. doc-comment) row ranges, then assigns nesting depths
    /// by containment and converts each point range via `range_callback`.
    fn outline_items_containing_internal<T: ToOffset, U>(
        &self,
        range: Range<T>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
        range_callback: fn(&Self, Range<Point>) -> Range<U>,
    ) -> Vec<OutlineItem<U>> {
        let range = range.to_offset(self);
        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
            grammar.outline_config.as_ref().map(|c| &c.query)
        });

        // Phase 1: gather raw items and annotation row ranges from the query.
        let mut items = Vec::new();
        let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
        while let Some(mat) = matches.peek() {
            let config = matches.grammars()[mat.grammar_index]
                .outline_config
                .as_ref()
                .unwrap();
            if let Some(item) =
                self.next_outline_item(config, &mat, &range, include_extra_context, theme)
            {
                items.push(item);
            } else if let Some(capture) = mat
                .captures
                .iter()
                .find(|capture| Some(capture.index) == config.annotation_capture_ix)
            {
                let capture_range = capture.node.start_position()..capture.node.end_position();
                let mut capture_row_range =
                    capture_range.start.row as u32..capture_range.end.row as u32;
                // A capture ending at column 0 doesn't actually occupy its end row.
                if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
                {
                    capture_row_range.end -= 1;
                }
                // Merge annotation ranges that are adjacent or overlapping.
                if let Some(last_row_range) = annotation_row_ranges.last_mut() {
                    if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
                        last_row_range.end = capture_row_range.end;
                    } else {
                        annotation_row_ranges.push(capture_row_range);
                    }
                } else {
                    annotation_row_ranges.push(capture_row_range);
                }
            }
            matches.advance();
        }

        // Sort so that containing items precede contained ones.
        items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));

        // Assign depths based on containment relationships and convert to anchors.
        let mut item_ends_stack = Vec::<Point>::new();
        let mut anchor_items = Vec::new();
        let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
        for item in items {
            // Pop ancestors that no longer contain this item.
            while let Some(last_end) = item_ends_stack.last().copied() {
                if last_end < item.range.end {
                    item_ends_stack.pop();
                } else {
                    break;
                }
            }

            // Attach an annotation only if it ends on the row immediately
            // preceding this item.
            let mut annotation_row_range = None;
            while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
                let row_preceding_item = item.range.start.row.saturating_sub(1);
                if next_annotation_row_range.end < row_preceding_item {
                    annotation_row_ranges.next();
                } else {
                    if next_annotation_row_range.end == row_preceding_item {
                        annotation_row_range = Some(next_annotation_row_range.clone());
                        annotation_row_ranges.next();
                    }
                    break;
                }
            }

            anchor_items.push(OutlineItem {
                depth: item_ends_stack.len(),
                range: range_callback(self, item.range.clone()),
                source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
                text: item.text,
                highlight_ranges: item.highlight_ranges,
                name_ranges: item.name_ranges,
                body_range: item.body_range.map(|r| range_callback(self, r)),
                annotation_range: annotation_row_range.map(|annotation_range| {
                    let point_range = Point::new(annotation_range.start, 0)
                        ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
                    range_callback(self, point_range)
                }),
            });
            item_ends_stack.push(item.range.end);
        }

        anchor_items
    }
4300
    /// Builds a single [`OutlineItem`] from one outline-query match, or
    /// returns `None` if the match is outside `range` or has no name/context
    /// captures. The returned item's `depth` is 0; callers assign real depths
    /// later.
    fn next_outline_item(
        &self,
        config: &OutlineConfig,
        mat: &SyntaxMapMatch,
        range: &Range<usize>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
    ) -> Option<OutlineItem<Point>> {
        // The item capture spans the whole outline entry (e.g. a full fn).
        let item_node = mat.captures.iter().find_map(|cap| {
            if cap.index == config.item_capture_ix {
                Some(cap.node)
            } else {
                None
            }
        })?;

        let item_byte_range = item_node.byte_range();
        if item_byte_range.end < range.start || item_byte_range.start > range.end {
            return None;
        }
        let item_point_range = Point::from_ts_point(item_node.start_position())
            ..Point::from_ts_point(item_node.end_position());

        let mut open_point = None;
        let mut close_point = None;

        // Byte ranges contributing to the item's display text, flagged by
        // whether each is part of the item's name.
        let mut buffer_ranges = Vec::new();
        let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
            let mut range = node.start_byte()..node.end_byte();
            let start = node.start_position();
            // Truncate multi-line captures to their first line.
            if node.end_position().row > start.row {
                range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
            }

            if !range.is_empty() {
                buffer_ranges.push((range, node_is_name));
            }
        };

        for capture in mat.captures {
            if capture.index == config.name_capture_ix {
                add_to_buffer_ranges(capture.node, true);
            } else if Some(capture.index) == config.context_capture_ix
                || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
            {
                add_to_buffer_ranges(capture.node, false);
            } else {
                // Open/close captures delimit the item's body (e.g. `{`/`}`).
                if Some(capture.index) == config.open_capture_ix {
                    open_point = Some(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.close_capture_ix {
                    close_point = Some(Point::from_ts_point(capture.node.start_position()));
                }
            }
        }

        if buffer_ranges.is_empty() {
            return None;
        }
        let source_range_for_text =
            buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;

        // Assemble the display text from the captured ranges, carrying over
        // syntax highlighting when a theme is provided.
        let mut text = String::new();
        let mut highlight_ranges = Vec::new();
        let mut name_ranges = Vec::new();
        let mut chunks = self.chunks(
            source_range_for_text.clone(),
            LanguageAwareStyling {
                tree_sitter: true,
                diagnostics: true,
            },
        );
        let mut last_buffer_range_end = 0;
        for (buffer_range, is_name) in buffer_ranges {
            // Separate non-adjacent captures with a single space.
            let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
            if space_added {
                text.push(' ');
            }
            let before_append_len = text.len();
            let mut offset = buffer_range.start;
            chunks.seek(buffer_range.clone());
            for mut chunk in chunks.by_ref() {
                // Clamp the final chunk to the capture's end.
                if chunk.text.len() > buffer_range.end - offset {
                    chunk.text = &chunk.text[0..(buffer_range.end - offset)];
                    offset = buffer_range.end;
                } else {
                    offset += chunk.text.len();
                }
                let style = chunk
                    .syntax_highlight_id
                    .zip(theme)
                    .and_then(|(highlight, theme)| theme.get(highlight).cloned());

                if let Some(style) = style {
                    let start = text.len();
                    let end = start + chunk.text.len();
                    highlight_ranges.push((start..end, style));
                }
                text.push_str(chunk.text);
                if offset >= buffer_range.end {
                    break;
                }
            }
            if is_name {
                let after_append_len = text.len();
                // Include the separating space in subsequent name ranges so
                // consecutive name segments stay contiguous.
                let start = if space_added && !name_ranges.is_empty() {
                    before_append_len - 1
                } else {
                    before_append_len
                };
                name_ranges.push(start..after_append_len);
            }
            last_buffer_range_end = buffer_range.end;
        }

        Some(OutlineItem {
            depth: 0, // We'll calculate the depth later
            range: item_point_range,
            source_range_for_text: source_range_for_text.to_point(self),
            text,
            highlight_ranges,
            name_ranges,
            body_range: open_point.zip(close_point).map(|(start, end)| start..end),
            annotation_range: None,
        })
    }
4426
4427 pub fn function_body_fold_ranges<T: ToOffset>(
4428 &self,
4429 within: Range<T>,
4430 ) -> impl Iterator<Item = Range<usize>> + '_ {
4431 self.text_object_ranges(within, TreeSitterOptions::default())
4432 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4433 }
4434
    /// For each grammar in the language, runs the provided
    /// [`tree_sitter::Query`] against the given range.
    ///
    /// `query` selects which query (if any) to run for each grammar;
    /// grammars for which it returns `None` are skipped.
    pub fn matches(
        &self,
        range: Range<usize>,
        query: fn(&Grammar) -> Option<&tree_sitter::Query>,
    ) -> SyntaxMapMatches<'_> {
        self.syntax.matches(range, self, query)
    }
4444
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
    /// Hence, may return more bracket pairs than the range contains.
    ///
    /// Will omit known chunks.
    /// The resulting bracket match collections are not ordered.
    ///
    /// Results are cached per chunk in `tree_sitter_data.brackets_by_chunks`;
    /// a chunk already present in the cache is returned without re-querying.
    pub fn fetch_bracket_ranges(
        &self,
        range: Range<usize>,
        known_chunks: Option<&HashSet<Range<BufferRow>>>,
    ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
        let mut all_bracket_matches = HashMap::default();

        for chunk in self
            .tree_sitter_data
            .chunks
            .applicable_chunks(&[range.to_point(self)])
        {
            // Skip chunks the caller already has results for.
            if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
                continue;
            }
            let chunk_range = chunk.anchor_range();
            let chunk_range = chunk_range.to_offset(&self);

            // Fast path: reuse previously computed brackets for this chunk.
            if let Some(cached_brackets) =
                &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
            {
                all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
                continue;
            }

            // (match, pattern index, rainbow_exclude) triples for this chunk.
            let mut all_brackets: Vec<(BracketMatch<usize>, usize, bool)> = Vec::new();
            let mut opens = Vec::new();
            let mut color_pairs = Vec::new();

            let mut matches = self.syntax.matches_with_options(
                chunk_range.clone(),
                &self.text,
                TreeSitterOptions {
                    max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
                    max_start_depth: None,
                },
                |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
            );
            let configs = matches
                .grammars()
                .iter()
                .map(|grammar| grammar.brackets_config.as_ref().unwrap())
                .collect::<Vec<_>>();

            // Group matches by open range so we can either trust grammar output
            // or repair it by picking a single closest close per open.
            let mut open_to_close_ranges = BTreeMap::new();
            while let Some(mat) = matches.peek() {
                let mut open = None;
                let mut close = None;
                let syntax_layer_depth = mat.depth;
                let pattern_index = mat.pattern_index;
                let config = configs[mat.grammar_index];
                let pattern = &config.patterns[pattern_index];
                for capture in mat.captures {
                    if capture.index == config.open_capture_ix {
                        open = Some(capture.node.byte_range());
                    } else if capture.index == config.close_capture_ix {
                        close = Some(capture.node.byte_range());
                    }
                }

                matches.advance();

                // Only keep matches that captured both an open and a close.
                let Some((open_range, close_range)) = open.zip(close) else {
                    continue;
                };

                let bracket_range = open_range.start..=close_range.end;
                if !bracket_range.overlaps(&chunk_range) {
                    continue;
                }

                open_to_close_ranges
                    .entry((open_range.start, open_range.end, pattern_index))
                    .or_insert_with(BTreeMap::new)
                    .insert(
                        (close_range.start, close_range.end),
                        BracketMatch {
                            open_range: open_range.clone(),
                            close_range: close_range.clone(),
                            syntax_layer_depth,
                            newline_only: pattern.newline_only,
                            color_index: None,
                        },
                    );

                all_brackets.push((
                    BracketMatch {
                        open_range,
                        close_range,
                        syntax_layer_depth,
                        newline_only: pattern.newline_only,
                        color_index: None,
                    },
                    pattern_index,
                    pattern.rainbow_exclude,
                ));
            }

            let has_bogus_matches = open_to_close_ranges
                .iter()
                .any(|(_, end_ranges)| end_ranges.len() > 1);
            if has_bogus_matches {
                // Grammar is producing bogus matches where one open is paired with multiple
                // closes. Build a valid stack by walking through positions in order.
                // For each close, we know the expected open_len from tree-sitter matches.

                // Map each close to its expected open length (for inferring opens)
                let close_to_open_len: HashMap<(usize, usize, usize), usize> = all_brackets
                    .iter()
                    .map(|(bracket_match, pattern_index, _)| {
                        (
                            (
                                bracket_match.close_range.start,
                                bracket_match.close_range.end,
                                *pattern_index,
                            ),
                            bracket_match.open_range.len(),
                        )
                    })
                    .collect();

                // Collect unique opens and closes within this chunk
                let mut unique_opens: HashSet<(usize, usize, usize)> = all_brackets
                    .iter()
                    .map(|(bracket_match, pattern_index, _)| {
                        (
                            bracket_match.open_range.start,
                            bracket_match.open_range.end,
                            *pattern_index,
                        )
                    })
                    .filter(|(start, _, _)| chunk_range.contains(start))
                    .collect();

                let mut unique_closes: Vec<(usize, usize, usize)> = all_brackets
                    .iter()
                    .map(|(bracket_match, pattern_index, _)| {
                        (
                            bracket_match.close_range.start,
                            bracket_match.close_range.end,
                            *pattern_index,
                        )
                    })
                    .filter(|(start, _, _)| chunk_range.contains(start))
                    .collect();
                unique_closes.sort();
                unique_closes.dedup();

                // Build valid pairs by walking through closes in order
                let mut unique_opens_vec: Vec<_> = unique_opens.iter().copied().collect();
                unique_opens_vec.sort();

                let mut valid_pairs: HashSet<((usize, usize, usize), (usize, usize, usize))> =
                    HashSet::default();
                // One stack of pending opens per pattern index.
                let mut open_stacks: HashMap<usize, Vec<(usize, usize)>> = HashMap::default();
                let mut open_idx = 0;

                for close in &unique_closes {
                    // Push all opens before this close onto stack
                    while open_idx < unique_opens_vec.len()
                        && unique_opens_vec[open_idx].0 < close.0
                    {
                        let (start, end, pattern_index) = unique_opens_vec[open_idx];
                        open_stacks
                            .entry(pattern_index)
                            .or_default()
                            .push((start, end));
                        open_idx += 1;
                    }

                    // Try to match with most recent open
                    let (close_start, close_end, pattern_index) = *close;
                    if let Some(open) = open_stacks
                        .get_mut(&pattern_index)
                        .and_then(|open_stack| open_stack.pop())
                    {
                        valid_pairs.insert(((open.0, open.1, pattern_index), *close));
                    } else if let Some(&open_len) = close_to_open_len.get(close) {
                        // No open on stack - infer one based on expected open_len
                        if close_start >= open_len {
                            let inferred = (close_start - open_len, close_start, pattern_index);
                            unique_opens.insert(inferred);
                            valid_pairs.insert((inferred, *close));
                            all_brackets.push((
                                BracketMatch {
                                    open_range: inferred.0..inferred.1,
                                    close_range: close_start..close_end,
                                    newline_only: false,
                                    syntax_layer_depth: 0,
                                    color_index: None,
                                },
                                pattern_index,
                                false,
                            ));
                        }
                    }
                }

                // Drop every pairing the stack walk did not validate.
                all_brackets.retain(|(bracket_match, pattern_index, _)| {
                    let open = (
                        bracket_match.open_range.start,
                        bracket_match.open_range.end,
                        *pattern_index,
                    );
                    let close = (
                        bracket_match.close_range.start,
                        bracket_match.close_range.end,
                        *pattern_index,
                    );
                    valid_pairs.contains(&(open, close))
                });
            }

            // Select which pairs participate in rainbow coloring.
            let mut all_brackets = all_brackets
                .into_iter()
                .enumerate()
                .map(|(index, (bracket_match, _, rainbow_exclude))| {
                    // Certain languages have "brackets" that are not brackets, e.g. tags. and such
                    // bracket will match the entire tag with all text inside.
                    // For now, avoid highlighting any pair that has more than single char in each bracket.
                    // We need to colorize `<Element/>` bracket pairs, so cannot make this check stricter.
                    let should_color = !rainbow_exclude
                        && (bracket_match.open_range.len() == 1
                            || bracket_match.close_range.len() == 1);
                    if should_color {
                        opens.push(bracket_match.open_range.clone());
                        color_pairs.push((
                            bracket_match.open_range.clone(),
                            bracket_match.close_range.clone(),
                            index,
                        ));
                    }
                    bracket_match
                })
                .collect::<Vec<_>>();

            opens.sort_by_key(|r| (r.start, r.end));
            opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
            color_pairs.sort_by_key(|(_, close, _)| close.end);

            // Assign nesting-depth color indices by replaying a bracket stack.
            let mut open_stack = Vec::new();
            let mut open_index = 0;
            for (open, close, index) in color_pairs {
                while open_index < opens.len() && opens[open_index].start < close.start {
                    open_stack.push(opens[open_index].clone());
                    open_index += 1;
                }

                if open_stack.last() == Some(&open) {
                    let depth_index = open_stack.len() - 1;
                    all_brackets[index].color_index = Some(depth_index);
                    open_stack.pop();
                }
            }

            all_brackets.sort_by_key(|bracket_match| {
                (bracket_match.open_range.start, bracket_match.open_range.end)
            });

            // Populate the cache slot only if no concurrent fill happened.
            if let empty_slot @ None =
                &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
            {
                *empty_slot = Some(all_brackets.clone());
            }
            all_bracket_matches.insert(chunk.row_range(), all_brackets);
        }

        all_bracket_matches
    }
4721
4722 pub fn all_bracket_ranges(
4723 &self,
4724 range: Range<usize>,
4725 ) -> impl Iterator<Item = BracketMatch<usize>> {
4726 self.fetch_bracket_ranges(range.clone(), None)
4727 .into_values()
4728 .flatten()
4729 .filter(move |bracket_match| {
4730 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4731 bracket_range.overlaps(&range)
4732 })
4733 }
4734
4735 /// Returns bracket range pairs overlapping or adjacent to `range`
4736 pub fn bracket_ranges<T: ToOffset>(
4737 &self,
4738 range: Range<T>,
4739 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4740 // Find bracket pairs that *inclusively* contain the given range.
4741 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4742 self.all_bracket_ranges(range)
4743 .filter(|pair| !pair.newline_only)
4744 }
4745
    /// Lazily yields `(byte_range, DebuggerTextObject)` pairs from the
    /// debug-variables query for every grammar overlapping `range`.
    ///
    /// Captures of the same [`DebuggerTextObject`] within one match are
    /// merged into a single range spanning all of them; results outside
    /// `range` are filtered out.
    pub fn debug_variables_query<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
        let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);

        let mut matches = self.syntax.matches_with_options(
            range.clone(),
            &self.text,
            TreeSitterOptions::default(),
            |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
        );

        let configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.debug_variables_config.as_ref())
            .collect::<Vec<_>>();

        // Pending results accumulated from the current match, drained first
        // on each iteration.
        let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();

        iter::from_fn(move || {
            loop {
                // Drain pending captures before consuming more matches.
                while let Some(capture) = captures.pop() {
                    if capture.0.overlaps(&range) {
                        return Some(capture);
                    }
                }

                let mat = matches.peek()?;

                let Some(config) = configs[mat.grammar_index].as_ref() else {
                    matches.advance();
                    continue;
                };

                for capture in mat.captures {
                    // Map the capture index to its DebuggerTextObject, if any.
                    let Some(ix) = config
                        .objects_by_capture_ix
                        .binary_search_by_key(&capture.index, |e| e.0)
                        .ok()
                    else {
                        continue;
                    };
                    let text_object = config.objects_by_capture_ix[ix].1;
                    let byte_range = capture.node.byte_range();

                    // Merge with an existing capture of the same object.
                    let mut found = false;
                    for (range, existing) in captures.iter_mut() {
                        if existing == &text_object {
                            range.start = range.start.min(byte_range.start);
                            range.end = range.end.max(byte_range.end);
                            found = true;
                            break;
                        }
                    }

                    if !found {
                        captures.push((byte_range, text_object));
                    }
                }

                matches.advance();
            }
        })
    }
4812
    /// Lazily yields `(byte_range, TextObject)` pairs from the text-object
    /// query for every grammar overlapping `range`.
    ///
    /// Captures of the same [`TextObject`] within one match are merged into a
    /// single range spanning all of them; results outside `range` are
    /// filtered out.
    pub fn text_object_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
        options: TreeSitterOptions,
    ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
        // Widen the endpoints, clamping the end to the buffer length.
        let range =
            range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));

        let mut matches =
            self.syntax
                .matches_with_options(range.clone(), &self.text, options, |grammar| {
                    grammar.text_object_config.as_ref().map(|c| &c.query)
                });

        let configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.text_object_config.as_ref())
            .collect::<Vec<_>>();

        // Pending results accumulated from the current match, drained first
        // on each iteration.
        let mut captures = Vec::<(Range<usize>, TextObject)>::new();

        iter::from_fn(move || {
            loop {
                // Drain pending captures before consuming more matches.
                while let Some(capture) = captures.pop() {
                    if capture.0.overlaps(&range) {
                        return Some(capture);
                    }
                }

                let mat = matches.peek()?;

                let Some(config) = configs[mat.grammar_index].as_ref() else {
                    matches.advance();
                    continue;
                };

                for capture in mat.captures {
                    // Map the capture index to its TextObject, if any.
                    let Some(ix) = config
                        .text_objects_by_capture_ix
                        .binary_search_by_key(&capture.index, |e| e.0)
                        .ok()
                    else {
                        continue;
                    };
                    let text_object = config.text_objects_by_capture_ix[ix].1;
                    let byte_range = capture.node.byte_range();

                    // Merge with an existing capture of the same object.
                    let mut found = false;
                    for (range, existing) in captures.iter_mut() {
                        if existing == &text_object {
                            range.start = range.start.min(byte_range.start);
                            range.end = range.end.max(byte_range.end);
                            found = true;
                            break;
                        }
                    }

                    if !found {
                        captures.push((byte_range, text_object));
                    }
                }

                matches.advance();
            }
        })
    }
4880
4881 /// Returns enclosing bracket ranges containing the given range
4882 pub fn enclosing_bracket_ranges<T: ToOffset>(
4883 &self,
4884 range: Range<T>,
4885 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4886 let range = range.start.to_offset(self)..range.end.to_offset(self);
4887
4888 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4889 let max_depth = result
4890 .iter()
4891 .map(|mat| mat.syntax_layer_depth)
4892 .max()
4893 .unwrap_or(0);
4894 result.into_iter().filter(move |pair| {
4895 pair.open_range.start <= range.start
4896 && pair.close_range.end >= range.end
4897 && pair.syntax_layer_depth == max_depth
4898 })
4899 }
4900
4901 /// Returns the smallest enclosing bracket ranges containing the given range or None if no brackets contain range
4902 ///
4903 /// Can optionally pass a range_filter to filter the ranges of brackets to consider
4904 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4905 &self,
4906 range: Range<T>,
4907 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4908 ) -> Option<(Range<usize>, Range<usize>)> {
4909 let range = range.start.to_offset(self)..range.end.to_offset(self);
4910
4911 // Get the ranges of the innermost pair of brackets.
4912 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4913
4914 for pair in self.enclosing_bracket_ranges(range) {
4915 if let Some(range_filter) = range_filter
4916 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4917 {
4918 continue;
4919 }
4920
4921 let len = pair.close_range.end - pair.open_range.start;
4922
4923 if let Some((existing_open, existing_close)) = &result {
4924 let existing_len = existing_close.end - existing_open.start;
4925 if len > existing_len {
4926 continue;
4927 }
4928 }
4929
4930 result = Some((pair.open_range, pair.close_range));
4931 }
4932
4933 result
4934 }
4935
    /// Returns anchor ranges for any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction query associated with each
    /// will be run on the relevant section of the buffer.
    pub fn redacted_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = Range<usize>> + '_ {
        let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
            grammar
                .redactions_config
                .as_ref()
                .map(|config| &config.query)
        });

        let configs = syntax_matches
            .grammars()
            .iter()
            .map(|grammar| grammar.redactions_config.as_ref())
            .collect::<Vec<_>>();

        // One iteration per syntax match: peek, extract the redaction
        // capture's byte range, then advance.
        // NOTE(review): returning None from `from_fn` ends the iterator, so a
        // match lacking the redaction capture terminates iteration instead of
        // being skipped — presumably every match of this query carries the
        // capture; confirm against the redaction queries.
        iter::from_fn(move || {
            let redacted_range = syntax_matches
                .peek()
                .and_then(|mat| {
                    configs[mat.grammar_index].and_then(|config| {
                        mat.captures
                            .iter()
                            .find(|capture| capture.index == config.redaction_capture_ix)
                    })
                })
                .map(|mat| mat.node.byte_range());
            syntax_matches.advance();
            redacted_range
        })
    }
4972
    /// Yields `(byte_range, language)` for each language-injection content
    /// capture intersecting `range`, resolving the injected language from
    /// the capture's start offset.
    pub fn injections_intersecting_range<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
        let offset_range = range.start.to_offset(self)..range.end.to_offset(self);

        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
            grammar
                .injection_config
                .as_ref()
                .map(|config| &config.query)
        });

        let configs = syntax_matches
            .grammars()
            .iter()
            .map(|grammar| grammar.injection_config.as_ref())
            .collect::<Vec<_>>();

        // One iteration per syntax match: peek, resolve content range and
        // language, then advance.
        // NOTE(review): returning None from `from_fn` ends the iterator, so a
        // match without a content capture (or with no resolvable language)
        // terminates iteration rather than being skipped — confirm this is
        // intended.
        iter::from_fn(move || {
            let ranges = syntax_matches.peek().and_then(|mat| {
                let config = &configs[mat.grammar_index]?;
                let content_capture_range = mat.captures.iter().find_map(|capture| {
                    if capture.index == config.content_capture_ix {
                        Some(capture.node.byte_range())
                    } else {
                        None
                    }
                })?;
                let language = self.language_at(content_capture_range.start)?;
                Some((content_capture_range, language))
            });
            syntax_matches.advance();
            ranges
        })
    }
5009
    /// Returns the runnable ranges (e.g. tests) detected by each language's
    /// runnable query within `offset_range`.
    pub fn runnable_ranges(
        &self,
        offset_range: Range<usize>,
    ) -> impl Iterator<Item = RunnableRange> + '_ {
        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
            grammar.runnable_config.as_ref().map(|config| &config.query)
        });

        // Per-grammar runnable configs, indexed by `mat.grammar_index` below.
        let test_configs = syntax_matches
            .grammars()
            .iter()
            .map(|grammar| grammar.runnable_config.as_ref())
            .collect::<Vec<_>>();

        iter::from_fn(move || {
            loop {
                let mat = syntax_matches.peek()?;

                let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
                    let mut run_range = None;
                    // Union of all capture ranges — the full extent of the match.
                    let full_range = mat.captures.iter().fold(
                        Range {
                            start: usize::MAX,
                            end: 0,
                        },
                        |mut acc, next| {
                            let byte_range = next.node.byte_range();
                            if acc.start > byte_range.start {
                                acc.start = byte_range.start;
                            }
                            if acc.end < byte_range.end {
                                acc.end = byte_range.end;
                            }
                            acc
                        },
                    );
                    if full_range.start > full_range.end {
                        // We did not find a full spanning range of this match.
                        return None;
                    }
                    // Collect named captures; a `Run` capture instead records
                    // the range to execute via the `run_range` side channel.
                    let extra_captures: SmallVec<[_; 1]> =
                        SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
                            test_configs
                                .extra_captures
                                .get(capture.index as usize)
                                .cloned()
                                .and_then(|tag_name| match tag_name {
                                    RunnableCapture::Named(name) => {
                                        Some((capture.node.byte_range(), name))
                                    }
                                    RunnableCapture::Run => {
                                        let _ = run_range.insert(capture.node.byte_range());
                                        None
                                    }
                                })
                        }));
                    // Without a run marker this match is not runnable.
                    let run_range = run_range?;
                    // Pattern properties with key "tag" become runnable tags.
                    let tags = test_configs
                        .query
                        .property_settings(mat.pattern_index)
                        .iter()
                        .filter_map(|property| {
                            if *property.key == *"tag" {
                                property
                                    .value
                                    .as_ref()
                                    .map(|value| RunnableTag(value.to_string().into()))
                            } else {
                                None
                            }
                        })
                        .collect();
                    // Resolve each named capture to the text it covers.
                    let extra_captures = extra_captures
                        .into_iter()
                        .map(|(range, name)| {
                            (
                                name.to_string(),
                                self.text_for_range(range).collect::<String>(),
                            )
                        })
                        .collect();
                    // All tags should have the same range.
                    Some(RunnableRange {
                        run_range,
                        full_range,
                        runnable: Runnable {
                            tags,
                            language: mat.language,
                            buffer: self.remote_id(),
                        },
                        extra_captures,
                        buffer_id: self.remote_id(),
                    })
                });

                syntax_matches.advance();
                if test_range.is_some() {
                    // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
                    // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
                    return test_range;
                }
            }
        })
    }
5114
    /// Returns selections for remote peers intersecting the given range.
    ///
    /// Yields one item per replica: `(replica_id, line_mode, cursor_shape,
    /// selections intersecting the range)`. When `include_local` is false,
    /// the local replica's selections are skipped.
    #[allow(clippy::type_complexity)]
    pub fn selections_in_range(
        &self,
        range: Range<Anchor>,
        include_local: bool,
    ) -> impl Iterator<
        Item = (
            ReplicaId,
            bool,
            CursorShape,
            impl Iterator<Item = &Selection<Anchor>> + '_,
        ),
    > + '_ {
        self.remote_selections
            .iter()
            .filter(move |(replica_id, set)| {
                (include_local || **replica_id != self.text.replica_id())
                    && !set.selections.is_empty()
            })
            .map(move |(replica_id, set)| {
                // The comparator never returns Equal (ties become Greater), so
                // the search always lands on `Err(ix)` where `ix` is the first
                // selection whose end is at or after `range.start`.
                let start_ix = match set.selections.binary_search_by(|probe| {
                    probe.end.cmp(&range.start, self).then(Ordering::Greater)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };
                // Ties become Less here, so `ix` is one past the last selection
                // whose start is at or before `range.end`.
                let end_ix = match set.selections.binary_search_by(|probe| {
                    probe.start.cmp(&range.end, self).then(Ordering::Less)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };

                (
                    *replica_id,
                    set.line_mode,
                    set.cursor_shape,
                    set.selections[start_ix..end_ix].iter(),
                )
            })
    }
5155
    /// Returns if the buffer contains any diagnostics.
    ///
    /// True when at least one language server has a diagnostic set recorded
    /// for this buffer (presumably empty sets are pruned on update — confirm
    /// at the update site).
    pub fn has_diagnostics(&self) -> bool {
        !self.diagnostics.is_empty()
    }
5160
5161 /// Returns all the diagnostics intersecting the given range.
5162 pub fn diagnostics_in_range<'a, T, O>(
5163 &'a self,
5164 search_range: Range<T>,
5165 reversed: bool,
5166 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
5167 where
5168 T: 'a + Clone + ToOffset,
5169 O: 'a + FromAnchor,
5170 {
5171 let mut iterators: Vec<_> = self
5172 .diagnostics
5173 .iter()
5174 .map(|(_, collection)| {
5175 collection
5176 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
5177 .peekable()
5178 })
5179 .collect();
5180
5181 std::iter::from_fn(move || {
5182 let (next_ix, _) = iterators
5183 .iter_mut()
5184 .enumerate()
5185 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
5186 .min_by(|(_, a), (_, b)| {
5187 let cmp = a
5188 .range
5189 .start
5190 .cmp(&b.range.start, self)
5191 // when range is equal, sort by diagnostic severity
5192 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
5193 // and stabilize order with group_id
5194 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
5195 if reversed { cmp.reverse() } else { cmp }
5196 })?;
5197 iterators[next_ix]
5198 .next()
5199 .map(
5200 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
5201 diagnostic,
5202 range: FromAnchor::from_anchor(&range.start, self)
5203 ..FromAnchor::from_anchor(&range.end, self),
5204 },
5205 )
5206 })
5207 }
5208
5209 /// Returns all the diagnostic groups associated with the given
5210 /// language server ID. If no language server ID is provided,
5211 /// all diagnostics groups are returned.
5212 pub fn diagnostic_groups(
5213 &self,
5214 language_server_id: Option<LanguageServerId>,
5215 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5216 let mut groups = Vec::new();
5217
5218 if let Some(language_server_id) = language_server_id {
5219 if let Some(set) = self.diagnostics.get(&language_server_id) {
5220 set.groups(language_server_id, &mut groups, self);
5221 }
5222 } else {
5223 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5224 diagnostics.groups(*language_server_id, &mut groups, self);
5225 }
5226 }
5227
5228 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5229 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5230 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5231 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5232 });
5233
5234 groups
5235 }
5236
    /// Returns an iterator over the diagnostics for the given group.
    ///
    /// The group id is looked up across all language servers' diagnostic sets.
    pub fn diagnostic_group<O>(
        &self,
        group_id: usize,
    ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
    where
        O: FromAnchor + 'static,
    {
        self.diagnostics
            .iter()
            .flat_map(move |(_, set)| set.group(group_id, self))
    }
5249
    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    ///
    /// Compare values across snapshots to detect such non-text changes.
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }
5255
    /// An integer version that changes when the buffer's syntax changes.
    ///
    /// Backed by the syntax map's own update counter.
    pub fn syntax_update_count(&self) -> usize {
        self.syntax.update_count()
    }
5260
    /// Returns a snapshot of underlying file.
    ///
    /// `None` for buffers that are not associated with any file.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }
5265
5266 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5267 if let Some(file) = self.file() {
5268 if file.path().file_name().is_none() || include_root {
5269 Some(file.full_path(cx).to_string_lossy().into_owned())
5270 } else {
5271 Some(file.path().display(file.path_style(cx)).to_string())
5272 }
5273 } else {
5274 None
5275 }
5276 }
5277
    /// Collects the words in `query.range`, mapped to an anchor range of one
    /// occurrence of each.
    ///
    /// When `query.fuzzy_contents` is set, only words containing all of its
    /// characters (case-insensitively, in order) are returned. Words whose
    /// first character is a digit are skipped when `query.skip_digits` is set.
    ///
    /// NOTE(review): a word that runs up to the very end of the range is never
    /// flushed (insertion only happens on a following non-word character) —
    /// confirm this is intended.
    pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
        let query_str = query.fuzzy_contents;
        // An empty fuzzy query matches nothing.
        if query_str.is_some_and(|query| query.is_empty()) {
            return BTreeMap::default();
        }

        // Word-character classification follows the buffer's language, if any.
        let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
            language,
            override_id: None,
        }));

        // `query_ix` counts how many query characters have been matched inside
        // the current word; a word qualifies once it reaches `query_len`.
        let mut query_ix = 0;
        let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
        let query_len = query_chars.as_ref().map_or(0, |query| query.len());

        let mut words = BTreeMap::default();
        let mut current_word_start_ix = None;
        let mut chunk_ix = query.range.start;
        for chunk in self.chunks(
            query.range,
            LanguageAwareStyling {
                tree_sitter: false,
                diagnostics: false,
            },
        ) {
            for (i, c) in chunk.text.char_indices() {
                let ix = chunk_ix + i;
                if classifier.is_word(c) {
                    if current_word_start_ix.is_none() {
                        current_word_start_ix = Some(ix);
                    }

                    // Advance the fuzzy cursor on a case-insensitive match.
                    if let Some(query_chars) = &query_chars
                        && query_ix < query_len
                        && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
                    {
                        query_ix += 1;
                    }
                    continue;
                } else if let Some(word_start) = current_word_start_ix.take()
                    && query_ix == query_len
                {
                    // A word just ended and the whole fuzzy query matched.
                    let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
                    let mut word_text = self.text_for_range(word_start..ix).peekable();
                    let first_char = word_text
                        .peek()
                        .and_then(|first_chunk| first_chunk.chars().next());
                    // Skip empty and "words" starting with digits as a heuristic to reduce useless completions
                    if !query.skip_digits
                        || first_char.is_none_or(|first_char| !first_char.is_digit(10))
                    {
                        words.insert(word_text.collect(), word_range);
                    }
                }
                // Each non-word character restarts fuzzy matching (the
                // `continue` above skips this for word characters).
                query_ix = 0;
            }
            chunk_ix += chunk.text.len();
        }

        words
    }
5339}
5340
/// A configuration to use when producing styled text chunks.
///
/// Passed to [`BufferSnapshot::chunks`] to control which styling data the
/// returned chunks carry.
#[derive(Clone, Copy)]
pub struct LanguageAwareStyling {
    /// Whether to highlight text chunks using tree-sitter.
    pub tree_sitter: bool,
    /// Whether to highlight text chunks based on the diagnostics data.
    pub diagnostics: bool,
}
5349
/// Parameters for [`BufferSnapshot::words_in_range`].
pub struct WordsQuery<'a> {
    /// Only returns words with all chars from the fuzzy string in them.
    pub fuzzy_contents: Option<&'a str>,
    /// Skips words that start with a digit.
    pub skip_digits: bool,
    /// Buffer offset range, to look for words.
    pub range: Range<usize>,
}
5358
/// Computes the indentation (leading run of spaces or tabs) of the given row.
fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
    indent_size_for_text(text.chars_at(Point::new(row, 0)))
}
5362
5363fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5364 let mut result = IndentSize::spaces(0);
5365 for c in text {
5366 let kind = match c {
5367 ' ' => IndentKind::Space,
5368 '\t' => IndentKind::Tab,
5369 _ => break,
5370 };
5371 if result.len == 0 {
5372 result.kind = kind;
5373 }
5374 result.len += 1;
5375 }
5376 result
5377}
5378
impl Clone for BufferSnapshot {
    // NOTE(review): a manual field-by-field impl rather than #[derive(Clone)] —
    // presumably kept explicit so new fields must be consciously handled here;
    // confirm there is no other reason before deriving.
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            syntax: self.syntax.clone(),
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            tree_sitter_data: self.tree_sitter_data.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
            capability: self.capability,
            modeline: self.modeline.clone(),
        }
    }
}
5395
/// Dereferences to the underlying [`text::BufferSnapshot`], so plain-text
/// snapshot methods are callable directly on [`BufferSnapshot`].
impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
5403
5404unsafe impl Send for BufferChunks<'_> {}
5405
impl<'a> BufferChunks<'a> {
    /// Creates a chunk iterator over `range`, optionally carrying syntax
    /// highlighting captures and diagnostic styling state.
    ///
    /// `buffer_snapshot` is required to recompute highlights and diagnostics
    /// when seeking outside the initial range; without it, such seeks are
    /// unsupported (see `seek`).
    pub(crate) fn new(
        text: &'a Rope,
        range: Range<usize>,
        syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
        diagnostics: bool,
        buffer_snapshot: Option<&'a BufferSnapshot>,
    ) -> Self {
        let mut highlights = None;
        if let Some((captures, highlight_maps)) = syntax {
            highlights = Some(BufferChunkHighlights {
                captures,
                next_capture: None,
                stack: Default::default(),
                highlight_maps,
            })
        }

        // Placeholder endpoints; the real ones are computed from the snapshot's
        // diagnostics by `initialize_diagnostic_endpoints` below.
        let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
        let chunks = text.chunks_in_range(range.clone());

        let mut this = BufferChunks {
            range,
            buffer_snapshot,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            unnecessary_depth: 0,
            underline: true,
            highlights,
        };
        this.initialize_diagnostic_endpoints();
        this
    }

    /// Seeks to the given byte offset in the buffer.
    pub fn seek(&mut self, range: Range<usize>) {
        let old_range = std::mem::replace(&mut self.range, range.clone());
        self.chunks.set_range(self.range.clone());
        if let Some(highlights) = self.highlights.as_mut() {
            if old_range.start <= self.range.start && old_range.end >= self.range.end {
                // Reuse existing highlights stack, as the new range is a subrange of the old one.
                highlights
                    .stack
                    .retain(|(end_offset, _)| *end_offset > range.start);
                if let Some(capture) = &highlights.next_capture
                    && range.start >= capture.node.start_byte()
                {
                    // The pending capture already covers the new start; if it
                    // is still in effect there, push it onto the stack.
                    let next_capture_end = capture.node.end_byte();
                    if range.start < next_capture_end
                        && let Some(capture_id) =
                            highlights.highlight_maps[capture.grammar_index].get(capture.index)
                    {
                        highlights.stack.push((next_capture_end, capture_id));
                    }
                    highlights.next_capture.take();
                }
            } else if let Some(snapshot) = self.buffer_snapshot {
                // Seeking outside the old range: recompute captures from scratch.
                let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
                *highlights = BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_maps,
                };
            } else {
                // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
                // Seeking such BufferChunks is not supported.
                debug_assert!(
                    false,
                    "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
                );
            }

            highlights.captures.set_byte_range(self.range.clone());
            self.initialize_diagnostic_endpoints();
        }
    }

    /// Rebuilds the sorted list of diagnostic start/end offsets that cover
    /// `self.range`, and resets the per-severity nesting depths.
    ///
    /// NOTE(review): `unnecessary_depth` is not reset here, unlike the four
    /// severity depths — confirm whether that is intentional.
    fn initialize_diagnostic_endpoints(&mut self) {
        if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
            && let Some(buffer) = self.buffer_snapshot
        {
            // Each diagnostic contributes a start and an end boundary.
            let mut diagnostic_endpoints = Vec::new();
            for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
            }
            // Sort by offset; at equal offsets, starts sort before ends.
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
            *diagnostics = diagnostic_endpoints.into_iter().peekable();
            self.hint_depth = 0;
            self.error_depth = 0;
            self.warning_depth = 0;
            self.information_depth = 0;
        }
    }

    /// The current byte offset in the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    /// The byte range remaining to be produced (`start` advances as chunks
    /// are emitted).
    pub fn range(&self) -> Range<usize> {
        self.range.clone()
    }

    /// Adjusts the nesting depth for the endpoint's severity (and the
    /// "unnecessary" depth) as iteration crosses a diagnostic boundary.
    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            // Other severities do not affect chunk styling.
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    /// The most severe diagnostic currently containing the iterator position.
    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    /// Whether the current position lies inside a diagnostic that marks the
    /// code as unnecessary.
    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}
5569
impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        // The emitted chunk extends at most to the next highlight-capture
        // boundary and the next diagnostic boundary.
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Pop captures that ended at or before the current position.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Push every capture that has already begun onto the stack; stop
            // at the first capture that starts beyond the current position.
            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    if let Some(highlight_id) = highlight_id {
                        highlights
                            .stack
                            .push((capture.node.end_byte(), highlight_id));
                    }
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Move the endpoints out temporarily so `&mut self` methods can be
        // called while iterating them; restored below.
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            // Apply every diagnostic boundary at or before the current position.
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        // The rope yields the chunk's text along with per-byte bitmaps of char
        // starts, tabs, and newlines. The u128 masking below implies chunks
        // are at most 128 bytes — presumably guaranteed by the rope; confirm.
        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
            newlines,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            // Clip the chunk at the nearest capture or diagnostic boundary.
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                // Also clip at the innermost active capture, and style with it.
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            // Shift and mask the bitmaps down to the emitted sub-slice.
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;
            let newlines = (newlines >> bit_start) & mask;

            self.range.start = chunk_end;
            // Advance the rope cursor once the underlying chunk is exhausted.
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                newlines,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}
5671
impl operation_queue::Operation for Operation {
    /// Returns the lamport timestamp used to order this operation in the queue.
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            // Text operations are applied through the underlying text buffer
            // and must never be enqueued here.
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            // All non-text operations carry their own lamport timestamp.
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}
5693
5694impl IndentSize {
5695 /// Returns an [`IndentSize`] representing the given spaces.
5696 pub fn spaces(len: u32) -> Self {
5697 Self {
5698 len,
5699 kind: IndentKind::Space,
5700 }
5701 }
5702
5703 /// Returns an [`IndentSize`] representing a tab.
5704 pub fn tab() -> Self {
5705 Self {
5706 len: 1,
5707 kind: IndentKind::Tab,
5708 }
5709 }
5710
5711 /// An iterator over the characters represented by this [`IndentSize`].
5712 pub fn chars(&self) -> impl Iterator<Item = char> {
5713 iter::repeat(self.char()).take(self.len as usize)
5714 }
5715
5716 /// The character representation of this [`IndentSize`].
5717 pub fn char(&self) -> char {
5718 match self.kind {
5719 IndentKind::Space => ' ',
5720 IndentKind::Tab => '\t',
5721 }
5722 }
5723
5724 /// Consumes the current [`IndentSize`] and returns a new one that has
5725 /// been shrunk or enlarged by the given size along the given direction.
5726 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5727 match direction {
5728 Ordering::Less => {
5729 if self.kind == size.kind && self.len >= size.len {
5730 self.len -= size.len;
5731 }
5732 }
5733 Ordering::Equal => {}
5734 Ordering::Greater => {
5735 if self.len == 0 {
5736 self = size;
5737 } else if self.kind == size.kind {
5738 self.len += size.len;
5739 }
5740 }
5741 }
5742 self
5743 }
5744
5745 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5746 match self.kind {
5747 IndentKind::Space => self.len as usize,
5748 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5749 }
5750 }
5751}
5752
/// A minimal [`File`] implementation used by tests.
#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    // Worktree-relative path of the file.
    pub path: Arc<RelPath>,
    // Name of the worktree root directory; used to build the full path.
    pub root_name: String,
    // Absolute directory containing the worktree root; `Some` makes the file
    // report itself as local (see `File::as_local`).
    pub local_root: Option<PathBuf>,
}
5759
#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    /// Joins the worktree root name with the relative path.
    fn full_path(&self, _: &gpui::App) -> PathBuf {
        let mut full_path = PathBuf::from(self.root_name.clone());
        full_path.push(self.path.as_std_path());
        full_path
    }

    /// Only files configured with a local root behave as local files.
    fn as_local(&self) -> Option<&dyn LocalFile> {
        self.local_root.as_ref().map(|_| self as &dyn LocalFile)
    }

    // Not supported by this test double.
    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    /// Falls back to the root name for paths without a file name component.
    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        match self.path().file_name() {
            Some(file_name) => file_name,
            None => self.root_name.as_ref(),
        }
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    // Not supported by this test double.
    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}
5802
#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    /// Reconstructs the absolute path as `local_root/root_name/path`.
    ///
    /// Panics if `local_root` is `None`; callers reach this impl through
    /// `File::as_local`, which returns `None` in that case.
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    // Loading contents is not supported by this test double.
    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}
5819
/// Coalesces an ascending stream of row numbers into contiguous ranges,
/// splitting any range that would exceed `max_len` rows.
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    // The range currently being grown; flushed when a gap appears, the length
    // cap is hit, or the input ends.
    let mut pending: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        for value in values.by_ref() {
            match &mut pending {
                // Extend the pending range when the value is adjacent and the
                // range still has room.
                Some(range) if value == range.end && range.len() < max_len => {
                    range.end += 1;
                }
                // Otherwise start a new range, emitting the finished one (if any).
                _ => {
                    let finished = pending.replace(value..value + 1);
                    if finished.is_some() {
                        return finished;
                    }
                }
            }
        }
        // Input exhausted: emit the final range, then `None` forever.
        pending.take()
    })
}
5848
/// Classifies characters as word, whitespace, or punctuation, optionally
/// consulting a language scope for extra word characters.
#[derive(Default, Debug)]
pub struct CharClassifier {
    // Language scope providing language-specific word characters, if any.
    scope: Option<LanguageScope>,
    // Selects which of the scope's character sets applies (see `kind_with`).
    scope_context: Option<CharScopeContext>,
    // When true, punctuation is classified as word characters.
    ignore_punctuation: bool,
}
5855
5856impl CharClassifier {
5857 pub fn new(scope: Option<LanguageScope>) -> Self {
5858 Self {
5859 scope,
5860 scope_context: None,
5861 ignore_punctuation: false,
5862 }
5863 }
5864
5865 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5866 Self {
5867 scope_context,
5868 ..self
5869 }
5870 }
5871
5872 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5873 Self {
5874 ignore_punctuation,
5875 ..self
5876 }
5877 }
5878
5879 pub fn is_whitespace(&self, c: char) -> bool {
5880 self.kind(c) == CharKind::Whitespace
5881 }
5882
5883 pub fn is_word(&self, c: char) -> bool {
5884 self.kind(c) == CharKind::Word
5885 }
5886
5887 pub fn is_punctuation(&self, c: char) -> bool {
5888 self.kind(c) == CharKind::Punctuation
5889 }
5890
5891 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5892 if c.is_alphanumeric() || c == '_' {
5893 return CharKind::Word;
5894 }
5895
5896 if let Some(scope) = &self.scope {
5897 let characters = match self.scope_context {
5898 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5899 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5900 None => scope.word_characters(),
5901 };
5902 if let Some(characters) = characters
5903 && characters.contains(&c)
5904 {
5905 return CharKind::Word;
5906 }
5907 }
5908
5909 if c.is_whitespace() {
5910 return CharKind::Whitespace;
5911 }
5912
5913 if ignore_punctuation {
5914 CharKind::Word
5915 } else {
5916 CharKind::Punctuation
5917 }
5918 }
5919
5920 pub fn kind(&self, c: char) -> CharKind {
5921 self.kind_with(c, self.ignore_punctuation)
5922 }
5923}
5924
/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    // Trailing-whitespace run carried over from the previous chunk, since a
    // line (and its trailing whitespace) may span a chunk boundary.
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            // The first segment of a chunk continues the previous chunk's last
            // line; if it is entirely whitespace, extend that carried-over run.
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            // A line's run is only final once the next segment begins (the
            // following chunk may still extend the current line), so push the
            // previous line's run now.
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            // The +1 accounts for the '\n' consumed by `split`.
            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        // The chunk's last segment has no trailing '\n'; undo the final +1.
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}