1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, ModelineSettings, Outline, OutlineConfig, PLAIN_TEXT,
5 RunnableCapture, RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{AutoIndentMode, LanguageSettings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff_with_offsets,
17};
18pub use crate::{
19 Grammar, HighlightId, HighlightMap, Language, LanguageRegistry, diagnostic_set::DiagnosticSet,
20 proto,
21};
22
23use anyhow::{Context as _, Result};
24use clock::Lamport;
25pub use clock::ReplicaId;
26use collections::{HashMap, HashSet};
27use encoding_rs::Encoding;
28use fs::MTime;
29use futures::channel::oneshot;
30use gpui::{
31 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
32 Task, TextStyle,
33};
34
35use lsp::LanguageServerId;
36use parking_lot::Mutex;
37use settings::WorktreeId;
38use smallvec::SmallVec;
39use smol::future::yield_now;
40use std::{
41 any::Any,
42 borrow::Cow,
43 cell::Cell,
44 cmp::{self, Ordering, Reverse},
45 collections::{BTreeMap, BTreeSet},
46 future::Future,
47 iter::{self, Iterator, Peekable},
48 mem,
49 num::NonZeroU32,
50 ops::{Deref, Range},
51 path::PathBuf,
52 rc,
53 sync::Arc,
54 time::{Duration, Instant},
55 vec,
56};
57use sum_tree::TreeMap;
58use text::operation_queue::OperationQueue;
59use text::*;
60pub use text::{
61 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
62 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
63 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
64 ToPointUtf16, Transaction, TransactionId, Unclipped,
65};
66use theme::{ActiveTheme as _, SyntaxTheme};
67#[cfg(any(test, feature = "test-support"))]
68use util::RandomCharIter;
69use util::{RangeExt, maybe, paths::PathStyle, rel_path::RelPath};
70
71#[cfg(any(test, feature = "test-support"))]
72pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
73
74pub use lsp::DiagnosticSeverity;
75
/// Indicate whether a [`Buffer`] has permissions to edit.
#[derive(PartialEq, Clone, Copy, Debug)]
pub enum Capability {
    /// The buffer is a mutable replica.
    ReadWrite,
    /// The buffer is a mutable replica, but toggled to be only readable.
    /// Unlike [`Capability::ReadOnly`], the underlying replica can still be edited.
    Read,
    /// The buffer is a read-only replica.
    ReadOnly,
}
86
87impl Capability {
88 /// Returns `true` if the capability is `ReadWrite`.
89 pub fn editable(self) -> bool {
90 matches!(self, Capability::ReadWrite)
91 }
92}
93
94pub type BufferRow = u32;
95
/// An in-memory representation of a source code file, including its text,
/// syntax trees, git status, and diagnostics.
pub struct Buffer {
    /// The CRDT text storage underlying this buffer.
    text: TextBuffer,
    /// Filesystem state, `None` when there is no path.
    file: Option<Arc<dyn File>>,
    /// The mtime of the file when this buffer was last loaded from
    /// or saved to disk.
    saved_mtime: Option<MTime>,
    /// The version vector when this buffer was last loaded from
    /// or saved to disk.
    saved_version: clock::Global,
    // NOTE(review): presumably the version when the buffer's preview state
    // was last captured — confirm against callers.
    preview_version: clock::Global,
    /// Nesting depth of currently open edit transactions.
    transaction_depth: usize,
    /// Dirty state captured when the outermost transaction started, if any.
    was_dirty_before_starting_transaction: Option<bool>,
    /// In-flight task reloading the buffer's contents from disk, if any.
    reload_task: Option<Task<Result<()>>>,
    /// The language used for parsing and language-aware features.
    language: Option<Arc<Language>>,
    /// Pending auto-indent requests.
    autoindent_requests: Vec<Arc<AutoindentRequest>>,
    /// Channels notified once pending auto-indentation has completed.
    wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
    /// In-flight auto-indent computation, if any.
    pending_autoindent: Option<Task<()>>,
    // NOTE(review): presumably how long a synchronous parse may run before
    // falling back to a background task — confirm where it is read.
    sync_parse_timeout: Option<Duration>,
    syntax_map: Mutex<SyntaxMap>,
    /// In-flight reparse task, if any.
    reparse: Option<Task<()>>,
    /// Watch channel broadcasting whether a reparse is in progress.
    parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
    /// Counter bumped by updates that don't change the text itself.
    non_text_state_update_count: usize,
    /// Diagnostics, keyed by the language server that produced them.
    diagnostics: TreeMap<LanguageServerId, DiagnosticSet>,
    /// The latest known selections of each remote replica.
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    /// Lamport timestamp of the most recent diagnostics update.
    diagnostics_timestamp: clock::Lamport,
    /// Union of completion trigger characters across language servers.
    completion_triggers: BTreeSet<String>,
    /// Completion trigger characters, per language server.
    completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
    /// Lamport timestamp of the most recent completion-triggers update.
    completion_triggers_timestamp: clock::Lamport,
    /// Operations received but not yet applied (e.g. out of order).
    deferred_ops: OperationQueue<Operation>,
    /// Whether this replica may edit the buffer.
    capability: Capability,
    /// Whether the buffer's contents conflict with the file on disk.
    has_conflict: bool,
    /// Memoize calls to has_changes_since(saved_version).
    /// The contents of a cell are (self.version, has_changes) at the time of a last call.
    has_unsaved_edits: Cell<(clock::Global, bool)>,
    // NOTE(review): flags flipped when the buffer changes, held weakly by
    // interested observers — confirm where the bits are set/read.
    change_bits: Vec<rc::Weak<Cell<bool>>>,
    /// Settings parsed from the file's modeline, if any.
    modeline: Option<Arc<ModelineSettings>>,
    _subscriptions: Vec<gpui::Subscription>,
    /// Cached per-row-chunk tree-sitter data (e.g. bracket matches).
    tree_sitter_data: Arc<TreeSitterData>,
    /// The character encoding used when loading/saving the file.
    encoding: &'static Encoding,
    /// Whether the file began with a byte-order mark.
    has_bom: bool,
    // NOTE(review): presumably maps reload transactions to the encoding/BOM
    // they introduced so undo can restore them — confirm.
    reload_with_encoding_txns: HashMap<TransactionId, (&'static Encoding, bool)>,
}
141
/// Tree-sitter derived data, cached per chunk of buffer rows.
#[derive(Debug)]
pub struct TreeSitterData {
    /// The buffer's rows partitioned into fixed-size chunks.
    chunks: RowChunks,
    /// Cached bracket matches for each chunk; `None` until computed.
    /// Always kept the same length as `chunks`.
    brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
}
147
148const MAX_ROWS_IN_A_CHUNK: u32 = 50;
149
150impl TreeSitterData {
151 fn clear(&mut self, snapshot: &text::BufferSnapshot) {
152 self.chunks = RowChunks::new(&snapshot, MAX_ROWS_IN_A_CHUNK);
153 self.brackets_by_chunks.get_mut().clear();
154 self.brackets_by_chunks
155 .get_mut()
156 .resize(self.chunks.len(), None);
157 }
158
159 fn new(snapshot: &text::BufferSnapshot) -> Self {
160 let chunks = RowChunks::new(&snapshot, MAX_ROWS_IN_A_CHUNK);
161 Self {
162 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
163 chunks,
164 }
165 }
166
167 fn version(&self) -> &clock::Global {
168 self.chunks.version()
169 }
170}
171
/// Whether a reparse of the buffer is currently in progress.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParseStatus {
    /// No reparse is running.
    Idle,
    /// A reparse is running.
    Parsing,
}
177
/// An immutable, cheaply cloneable representation of a fixed
/// state of a buffer.
pub struct BufferSnapshot {
    /// The underlying text snapshot.
    pub text: text::BufferSnapshot,
    /// The syntax trees corresponding to `text`.
    pub(crate) syntax: SyntaxSnapshot,
    /// Cached per-row-chunk tree-sitter data (e.g. bracket matches).
    tree_sitter_data: Arc<TreeSitterData>,
    /// Diagnostics, keyed by the language server that produced them.
    diagnostics: TreeMap<LanguageServerId, DiagnosticSet>,
    /// The latest known selections of each remote replica.
    remote_selections: TreeMap<ReplicaId, SelectionSet>,
    /// The buffer's language at the time the snapshot was taken.
    language: Option<Arc<Language>>,
    /// The associated file, if any.
    file: Option<Arc<dyn File>>,
    /// See [`Buffer`]: counter for updates that don't change the text.
    non_text_state_update_count: usize,
    /// The buffer's edit capability at the time the snapshot was taken.
    pub capability: Capability,
    /// Settings parsed from the file's modeline, if any.
    modeline: Option<Arc<ModelineSettings>>,
}
192
/// The kind and amount of indentation in a particular line. For now,
/// assumes that indentation is all the same character.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub struct IndentSize {
    /// The number of bytes that comprise the indentation.
    pub len: u32,
    /// The kind of whitespace used for indentation. See [`IndentKind`].
    pub kind: IndentKind,
}
202
/// A whitespace character that's used for indentation.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
pub enum IndentKind {
    /// An ASCII space character.
    #[default]
    Space,
    /// An ASCII tab character.
    Tab,
}
212
/// The shape of a selection cursor.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CursorShape {
    /// A vertical bar
    #[default]
    Bar,
    /// A block that surrounds the following character
    Block,
    /// An underline that runs along the following character
    Underline,
    /// A box drawn around the following character
    Hollow,
}
226
227impl From<settings::CursorShape> for CursorShape {
228 fn from(shape: settings::CursorShape) -> Self {
229 match shape {
230 settings::CursorShape::Bar => CursorShape::Bar,
231 settings::CursorShape::Block => CursorShape::Block,
232 settings::CursorShape::Underline => CursorShape::Underline,
233 settings::CursorShape::Hollow => CursorShape::Hollow,
234 }
235 }
236}
237
/// One replica's set of selections, as replicated to this buffer.
#[derive(Clone, Debug)]
struct SelectionSet {
    /// Whether the selections are in 'line mode'.
    line_mode: bool,
    /// The cursor shape used by the replica that owns these selections.
    cursor_shape: CursorShape,
    /// The selections themselves.
    selections: Arc<[Selection<Anchor>]>,
    /// Lamport timestamp of the most recent update to this set.
    lamport_timestamp: clock::Lamport,
}
245
/// An operation used to synchronize this buffer with its other replicas.
///
/// Every non-text variant carries a lamport timestamp that is used to order
/// it relative to other operations.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {
    /// A text operation.
    Buffer(text::Operation),

    /// An update to the buffer's diagnostics.
    UpdateDiagnostics {
        /// The id of the language server that produced the new diagnostics.
        server_id: LanguageServerId,
        /// The diagnostics.
        diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },

    /// An update to the most recent selections in this buffer.
    UpdateSelections {
        /// The selections.
        selections: Arc<[Selection<Anchor>]>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// Whether the selections are in 'line mode'.
        line_mode: bool,
        /// The [`CursorShape`] associated with these selections.
        cursor_shape: CursorShape,
    },

    /// An update to the characters that should trigger autocompletion
    /// for this buffer.
    UpdateCompletionTriggers {
        /// The characters that trigger autocompletion.
        triggers: Vec<String>,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
        /// The language server ID.
        server_id: LanguageServerId,
    },

    /// An update to the line ending type of this buffer.
    UpdateLineEnding {
        /// The line ending type.
        line_ending: LineEnding,
        /// The buffer's lamport timestamp.
        lamport_timestamp: clock::Lamport,
    },
}
293
/// An event that occurs in a buffer.
#[derive(Clone, Debug, PartialEq)]
pub enum BufferEvent {
    /// The buffer was changed in a way that must be
    /// propagated to its other replicas.
    Operation {
        /// The operation to propagate.
        operation: Operation,
        /// Whether the operation originated on this replica.
        is_local: bool,
    },
    /// The buffer was edited.
    Edited { is_local: bool },
    /// The buffer's `dirty` bit changed.
    DirtyChanged,
    /// The buffer was saved.
    Saved,
    /// The buffer's file was changed on disk.
    FileHandleChanged,
    /// The buffer was reloaded.
    Reloaded,
    /// The buffer is in need of a reload
    ReloadNeeded,
    /// The buffer's language was changed.
    /// The boolean indicates whether this buffer did not have a language before, but does now.
    LanguageChanged(bool),
    /// The buffer's syntax trees were updated.
    Reparsed,
    /// The buffer's diagnostics were updated.
    DiagnosticsUpdated,
    /// The buffer gained or lost editing capabilities.
    CapabilityChanged,
}
325
/// The file associated with a buffer.
pub trait File: Send + Sync + Any {
    /// Returns the [`LocalFile`] associated with this file, if the
    /// file is local.
    fn as_local(&self) -> Option<&dyn LocalFile>;

    /// Returns whether this file is local.
    fn is_local(&self) -> bool {
        self.as_local().is_some()
    }

    /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
    /// only available in some states, such as modification time.
    fn disk_state(&self) -> DiskState;

    /// Returns the path of this file relative to the worktree's root directory.
    fn path(&self) -> &Arc<RelPath>;

    /// Returns the path of this file relative to the worktree's parent directory (this means it
    /// includes the name of the worktree's root folder).
    fn full_path(&self, cx: &App) -> PathBuf;

    /// Returns the path style of this file.
    fn path_style(&self, cx: &App) -> PathStyle;

    /// Returns the last component of this handle's absolute path. If this handle refers to the root
    /// of its worktree, then this method will return the name of the worktree itself.
    fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;

    /// Returns the id of the worktree to which this file belongs.
    ///
    /// This is needed for looking up project-specific settings.
    fn worktree_id(&self, cx: &App) -> WorktreeId;

    /// Converts this file into a protobuf message.
    fn to_proto(&self, cx: &App) -> rpc::proto::File;

    /// Return whether Zed considers this to be a private file.
    fn is_private(&self) -> bool;

    /// Returns whether this file can be opened.
    /// The default implementation reports `true` only for non-local files.
    // NOTE(review): the intent behind excluding local files here isn't visible
    // in this file — confirm against implementors before relying on it.
    fn can_open(&self) -> bool {
        !self.is_local()
    }
}
370
/// The file's storage status - whether it's stored (`Present`), and if so when it was last
/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
/// indicator for new files.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum DiskState {
    /// File created in Zed that has not been saved.
    New,
    /// File present on the filesystem.
    Present { mtime: MTime, size: u64 },
    /// Deleted file that was previously present.
    Deleted,
    /// An old version of a file that was previously present
    /// usually from a version control system. e.g. A git blob
    Historic { was_deleted: bool },
}
387
388impl DiskState {
389 /// Returns the file's last known modification time on disk.
390 pub fn mtime(self) -> Option<MTime> {
391 match self {
392 DiskState::New => None,
393 DiskState::Present { mtime, .. } => Some(mtime),
394 DiskState::Deleted => None,
395 DiskState::Historic { .. } => None,
396 }
397 }
398
399 /// Returns the file's size on disk in bytes.
400 pub fn size(self) -> Option<u64> {
401 match self {
402 DiskState::New => None,
403 DiskState::Present { size, .. } => Some(size),
404 DiskState::Deleted => None,
405 DiskState::Historic { .. } => None,
406 }
407 }
408
409 pub fn exists(&self) -> bool {
410 match self {
411 DiskState::New => false,
412 DiskState::Present { .. } => true,
413 DiskState::Deleted => false,
414 DiskState::Historic { .. } => false,
415 }
416 }
417
418 /// Returns true if this state represents a deleted file.
419 pub fn is_deleted(&self) -> bool {
420 match self {
421 DiskState::Deleted => true,
422 DiskState::Historic { was_deleted } => *was_deleted,
423 _ => false,
424 }
425 }
426}
427
/// The file associated with a buffer, in the case where the file is on the local disk.
pub trait LocalFile: File {
    /// Returns the absolute path of this file.
    fn abs_path(&self, cx: &App) -> PathBuf;

    /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
    fn load(&self, cx: &App) -> Task<Result<String>>;

    /// Loads the file's contents from disk as raw bytes, without decoding.
    fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
}
439
/// The auto-indent behavior associated with an editing operation.
/// For some editing operations, each affected line of text has its
/// indentation recomputed. For other operations, the entire block
/// of edited text is adjusted uniformly.
#[derive(Clone, Debug)]
pub enum AutoindentMode {
    /// Indent each line of inserted text.
    EachLine,
    /// Apply the same indentation adjustment to all of the lines
    /// in a given insertion.
    Block {
        /// The original indentation column of the first line of each
        /// insertion, if it has been copied.
        ///
        /// Knowing this makes it possible to preserve the relative indentation
        /// of every line in the insertion from when it was copied.
        ///
        /// If the original indent column is `a`, and the first line of insertion
        /// is then auto-indented to column `b`, then every other line of
        /// the insertion will be auto-indented to column `b - a`
        original_indent_columns: Vec<Option<u32>>,
    },
}
463
/// A pending request to recompute indentation for a set of edited ranges.
#[derive(Clone)]
struct AutoindentRequest {
    /// Snapshot of the buffer before the triggering edit was applied.
    before_edit: BufferSnapshot,
    /// One entry per range whose indentation should be adjusted.
    entries: Vec<AutoindentRequestEntry>,
    /// Whether [`AutoindentMode::Block`] (uniform) adjustment was requested.
    is_block_mode: bool,
    // NOTE(review): presumably whitespace-only lines are left untouched when
    // set — confirm where this flag is consumed.
    ignore_empty_lines: bool,
}
471
#[derive(Debug, Clone)]
struct AutoindentRequestEntry {
    /// A range of the buffer whose indentation should be adjusted.
    range: Range<Anchor>,
    /// The row of the edit start in the buffer before the edit was applied.
    /// This is stored here because the anchor in range is created after
    /// the edit, so it cannot be used with the before_edit snapshot.
    old_row: Option<u32>,
    /// The indentation unit (kind and width) to use for this entry.
    indent_size: IndentSize,
    /// See [`AutoindentMode::Block`]: the original indent column of the
    /// first inserted line, if the insertion was copied.
    original_indent_column: Option<u32>,
}
483
/// A suggestion for how a row should be indented, expressed relative to
/// another ("basis") row.
#[derive(Debug)]
struct IndentSuggestion {
    /// The row whose indentation the suggestion is based on.
    basis_row: u32,
    /// Whether to indent more than, the same as, or less than the basis row.
    delta: Ordering,
    // NOTE(review): presumably true when the suggestion was derived inside a
    // syntax-error region — confirm where it is produced.
    within_error: bool,
}
490
/// Internal syntax-highlighting state for a [`BufferChunks`] iterator.
struct BufferChunkHighlights<'a> {
    /// The syntax captures being traversed.
    captures: SyntaxMapCaptures<'a>,
    /// The next capture to process, if already fetched.
    next_capture: Option<SyntaxMapCapture<'a>>,
    // NOTE(review): presumably a stack of currently open captures as
    // (end offset, highlight id) — confirm against BufferChunks::next.
    stack: Vec<(usize, HighlightId)>,
    /// One highlight map per grammar appearing in `captures`.
    highlight_maps: Vec<HighlightMap>,
}
497
/// An iterator that yields chunks of a buffer's text, along with their
/// syntax highlights and diagnostic status.
pub struct BufferChunks<'a> {
    buffer_snapshot: Option<&'a BufferSnapshot>,
    /// The byte range of the buffer being traversed.
    range: Range<usize>,
    /// The underlying raw text chunks.
    chunks: text::Chunks<'a>,
    /// Diagnostic range boundaries to interleave with the text, if requested.
    diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
    // Nesting depth of currently open diagnostics at each severity.
    error_depth: usize,
    warning_depth: usize,
    information_depth: usize,
    hint_depth: usize,
    unnecessary_depth: usize,
    /// Whether the current position falls inside an underlined diagnostic.
    underline: bool,
    /// Syntax-highlighting state; `None` when highlighting was not requested.
    highlights: Option<BufferChunkHighlights<'a>>,
}
513
/// A chunk of a buffer's text, along with its syntax highlight and
/// diagnostic status.
#[derive(Clone, Debug, Default)]
pub struct Chunk<'a> {
    /// The text of the chunk.
    pub text: &'a str,
    /// The syntax highlighting style of the chunk.
    pub syntax_highlight_id: Option<HighlightId>,
    /// The highlight style that has been applied to this chunk in
    /// the editor.
    pub highlight_style: Option<HighlightStyle>,
    /// The severity of diagnostic associated with this chunk, if any.
    pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters are tabs in this string.
    pub tabs: u128,
    /// Bitmap of character indices in this chunk
    pub chars: u128,
    /// Bitmap of newline indices in this chunk
    pub newlines: u128,
    // NOTE(review): the u128 bitmaps imply chunks are at most 128 bytes long —
    // confirm against the chunk producer.
    /// Whether this chunk of text is marked as unnecessary.
    pub is_unnecessary: bool,
    /// Whether this chunk of text was originally a tab character.
    pub is_tab: bool,
    /// Whether this chunk of text was originally an inlay.
    pub is_inlay: bool,
    /// Whether to underline the corresponding text range in the editor.
    pub underline: bool,
}
542
/// A set of edits to a given version of a buffer, computed asynchronously.
#[derive(Debug, Clone)]
pub struct Diff {
    /// The buffer version the edit offsets are relative to.
    pub base_version: clock::Global,
    /// The line ending of the new text.
    pub line_ending: LineEnding,
    /// Edits as (byte range in the base version, replacement text) pairs.
    pub edits: Vec<(Range<usize>, Arc<str>)>,
}
550
/// A start or end boundary of a diagnostic's range, used to interleave
/// diagnostic state with text chunks.
#[derive(Debug, Clone, Copy)]
pub(crate) struct DiagnosticEndpoint {
    /// Byte offset of the boundary.
    offset: usize,
    /// Whether this is the start (`true`) or end (`false`) of the range.
    is_start: bool,
    /// Whether the diagnostic's range should be underlined.
    underline: bool,
    /// The severity of the diagnostic.
    severity: DiagnosticSeverity,
    /// Whether the diagnostic marks the range as unnecessary code.
    is_unnecessary: bool,
}
559
/// A class of characters, used for characterizing a run of text.
#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
pub enum CharKind {
    /// Whitespace.
    Whitespace,
    /// Punctuation.
    Punctuation,
    /// Word.
    Word,
}
570
/// Context for character classification within a specific scope.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum CharScopeContext {
    /// Character classification for completion queries.
    ///
    /// This context treats certain characters as word constituents that would
    /// normally be considered punctuation, such as '-' in Tailwind classes
    /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
    Completion,
    /// Character classification for linked edits.
    ///
    /// This context handles characters that should be treated as part of
    /// identifiers during linked editing operations, such as '.' in JSX
    /// component names like `<Animated.View>`.
    LinkedEdit,
}
587
/// A runnable is a set of data about a region that could be resolved into a task
pub struct Runnable {
    /// The tags captured for this runnable region.
    pub tags: SmallVec<[RunnableTag; 1]>,
    /// The language the region belongs to.
    pub language: Arc<Language>,
    /// The buffer containing the region.
    pub buffer: BufferId,
}
594
/// A string together with the highlight styles to apply to sub-ranges of it.
#[derive(Default, Clone, Debug)]
pub struct HighlightedText {
    /// The text itself.
    pub text: SharedString,
    /// Highlight styles, keyed by byte range within `text`.
    pub highlights: Vec<(Range<usize>, HighlightStyle)>,
}
600
/// Mutable accumulator used to construct a [`HighlightedText`].
#[derive(Default, Debug)]
struct HighlightedTextBuilder {
    /// The text accumulated so far.
    pub text: String,
    /// Highlight styles, keyed by byte range within `text`.
    highlights: Vec<(Range<usize>, HighlightStyle)>,
}
606
impl HighlightedText {
    /// Builds highlighted text for `range` of the given buffer snapshot,
    /// using `syntax_snapshot` for highlight captures. When provided,
    /// `override_style` is merged into every chunk's style.
    pub fn from_buffer_range<T: ToOffset>(
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) -> Self {
        let mut highlighted_text = HighlightedTextBuilder::default();
        highlighted_text.add_text_from_buffer_range(
            range,
            snapshot,
            syntax_snapshot,
            override_style,
            syntax_theme,
        );
        highlighted_text.build()
    }

    /// Converts this highlighted text into a [`StyledText`] element, filling
    /// unhighlighted spans with `default_style`.
    pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
        gpui::StyledText::new(self.text.clone())
            .with_default_highlights(default_style, self.highlights.iter().cloned())
    }

    /// Returns the first line without leading whitespace unless highlighted
    /// and a boolean indicating if there are more lines after
    pub fn first_line_preview(self) -> (Self, bool) {
        // Consider only the text up to the first newline (or all of it).
        let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
        let first_line = &self.text[..newline_ix];

        // Trim leading whitespace, unless an edit starts prior to it.
        let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
        if let Some((first_highlight_range, _)) = self.highlights.first() {
            preview_start_ix = preview_start_ix.min(first_highlight_range.start);
        }

        let preview_text = &first_line[preview_start_ix..];
        // Re-base highlight ranges onto the preview's coordinate space,
        // discarding those that end before the preview or become empty
        // after clamping to the first line.
        let preview_highlights = self
            .highlights
            .into_iter()
            .skip_while(|(range, _)| range.end <= preview_start_ix)
            .take_while(|(range, _)| range.start < newline_ix)
            .filter_map(|(mut range, highlight)| {
                range.start = range.start.saturating_sub(preview_start_ix);
                range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
                if range.is_empty() {
                    None
                } else {
                    Some((range, highlight))
                }
            });

        let preview = Self {
            text: SharedString::new(preview_text),
            highlights: preview_highlights.collect(),
        };

        // More lines exist iff a newline was found before the end of the text.
        (preview, self.text.len() > newline_ix)
    }
}
667
impl HighlightedTextBuilder {
    /// Finishes the builder, producing an immutable [`HighlightedText`].
    pub fn build(self) -> HighlightedText {
        HighlightedText {
            text: self.text.into(),
            highlights: self.highlights,
        }
    }

    /// Appends the text in `range` of `snapshot`, recording a highlight for
    /// each chunk. A chunk's syntax style is merged with `override_style`
    /// when both are present; `override_style` alone is used when the chunk
    /// has no syntax style.
    pub fn add_text_from_buffer_range<T: ToOffset>(
        &mut self,
        range: Range<T>,
        snapshot: &text::BufferSnapshot,
        syntax_snapshot: &SyntaxSnapshot,
        override_style: Option<HighlightStyle>,
        syntax_theme: &SyntaxTheme,
    ) {
        let range = range.to_offset(snapshot);
        for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
            // Track the chunk's span within the accumulated text.
            let start = self.text.len();
            self.text.push_str(chunk.text);
            let end = self.text.len();

            if let Some(highlight_style) = chunk
                .syntax_highlight_id
                .and_then(|id| syntax_theme.get(id).cloned())
            {
                let highlight_style = override_style.map_or(highlight_style, |override_style| {
                    highlight_style.highlight(override_style)
                });
                self.highlights.push((start..end, highlight_style));
            } else if let Some(override_style) = override_style {
                self.highlights.push((start..end, override_style));
            }
        }
    }

    /// Creates a [`BufferChunks`] iterator over `range`, capturing each
    /// grammar's highlight query and its corresponding highlight map.
    fn highlighted_chunks<'a>(
        range: Range<usize>,
        snapshot: &'a text::BufferSnapshot,
        syntax_snapshot: &'a SyntaxSnapshot,
    ) -> BufferChunks<'a> {
        let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
            grammar
                .highlights_config
                .as_ref()
                .map(|config| &config.query)
        });

        let highlight_maps = captures
            .grammars()
            .iter()
            .map(|grammar| grammar.highlight_map())
            .collect();

        BufferChunks::new(
            snapshot.as_rope(),
            range,
            Some((captures, highlight_maps)),
            false,
            None,
        )
    }
}
731
/// A preview of a set of proposed edits: the buffer text before and after
/// applying them, plus a syntax snapshot for highlighting the result.
#[derive(Clone)]
pub struct EditPreview {
    /// The buffer text before the edits were applied.
    old_snapshot: text::BufferSnapshot,
    /// The buffer text with the edits applied.
    applied_edits_snapshot: text::BufferSnapshot,
    /// Syntax snapshot corresponding to `applied_edits_snapshot`.
    syntax_snapshot: SyntaxSnapshot,
}
738
739impl EditPreview {
740 pub fn as_unified_diff(
741 &self,
742 file: Option<&Arc<dyn File>>,
743 edits: &[(Range<Anchor>, impl AsRef<str>)],
744 ) -> Option<String> {
745 let (first, _) = edits.first()?;
746 let (last, _) = edits.last()?;
747
748 let start = first.start.to_point(&self.old_snapshot);
749 let old_end = last.end.to_point(&self.old_snapshot);
750 let new_end = last
751 .end
752 .bias_right(&self.old_snapshot)
753 .to_point(&self.applied_edits_snapshot);
754
755 let start = Point::new(start.row.saturating_sub(3), 0);
756 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
757 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
758
759 let diff_body = unified_diff_with_offsets(
760 &self
761 .old_snapshot
762 .text_for_range(start..old_end)
763 .collect::<String>(),
764 &self
765 .applied_edits_snapshot
766 .text_for_range(start..new_end)
767 .collect::<String>(),
768 start.row,
769 start.row,
770 );
771
772 let path = file.map(|f| f.path().as_unix_str());
773 let header = match path {
774 Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
775 None => String::new(),
776 };
777
778 Some(format!("{}{}", header, diff_body))
779 }
780
781 pub fn highlight_edits(
782 &self,
783 current_snapshot: &BufferSnapshot,
784 edits: &[(Range<Anchor>, impl AsRef<str>)],
785 include_deletions: bool,
786 cx: &App,
787 ) -> HighlightedText {
788 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
789 return HighlightedText::default();
790 };
791
792 let mut highlighted_text = HighlightedTextBuilder::default();
793
794 let visible_range_in_preview_snapshot =
795 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
796 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
797
798 let insertion_highlight_style = HighlightStyle {
799 background_color: Some(cx.theme().status().created_background),
800 ..Default::default()
801 };
802 let deletion_highlight_style = HighlightStyle {
803 background_color: Some(cx.theme().status().deleted_background),
804 ..Default::default()
805 };
806 let syntax_theme = cx.theme().syntax();
807
808 for (range, edit_text) in edits {
809 let edit_new_end_in_preview_snapshot = range
810 .end
811 .bias_right(&self.old_snapshot)
812 .to_offset(&self.applied_edits_snapshot);
813 let edit_start_in_preview_snapshot =
814 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
815
816 let unchanged_range_in_preview_snapshot =
817 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
818 if !unchanged_range_in_preview_snapshot.is_empty() {
819 highlighted_text.add_text_from_buffer_range(
820 unchanged_range_in_preview_snapshot,
821 &self.applied_edits_snapshot,
822 &self.syntax_snapshot,
823 None,
824 syntax_theme,
825 );
826 }
827
828 let range_in_current_snapshot = range.to_offset(current_snapshot);
829 if include_deletions && !range_in_current_snapshot.is_empty() {
830 highlighted_text.add_text_from_buffer_range(
831 range_in_current_snapshot,
832 ¤t_snapshot.text,
833 ¤t_snapshot.syntax,
834 Some(deletion_highlight_style),
835 syntax_theme,
836 );
837 }
838
839 if !edit_text.as_ref().is_empty() {
840 highlighted_text.add_text_from_buffer_range(
841 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
842 &self.applied_edits_snapshot,
843 &self.syntax_snapshot,
844 Some(insertion_highlight_style),
845 syntax_theme,
846 );
847 }
848
849 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
850 }
851
852 highlighted_text.add_text_from_buffer_range(
853 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
854 &self.applied_edits_snapshot,
855 &self.syntax_snapshot,
856 None,
857 syntax_theme,
858 );
859
860 highlighted_text.build()
861 }
862
863 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
864 cx.new(|cx| {
865 let mut buffer = Buffer::local_normalized(
866 self.applied_edits_snapshot.as_rope().clone(),
867 self.applied_edits_snapshot.line_ending(),
868 cx,
869 );
870 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
871 buffer
872 })
873 }
874
875 pub fn anchor_to_offset_in_result(&self, anchor: Anchor) -> usize {
876 anchor
877 .bias_right(&self.old_snapshot)
878 .to_offset(&self.applied_edits_snapshot)
879 }
880
881 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
882 let (first, _) = edits.first()?;
883 let (last, _) = edits.last()?;
884
885 let start = first
886 .start
887 .bias_left(&self.old_snapshot)
888 .to_point(&self.applied_edits_snapshot);
889 let end = last
890 .end
891 .bias_right(&self.old_snapshot)
892 .to_point(&self.applied_edits_snapshot);
893
894 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
895 let range = Point::new(start.row, 0)
896 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
897
898 Some(range)
899 }
900}
901
/// A pair of matching brackets, generic over the coordinate type `T`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct BracketMatch<T> {
    /// The range of the opening bracket.
    pub open_range: Range<T>,
    /// The range of the closing bracket.
    pub close_range: Range<T>,
    // NOTE(review): presumably restricts the match to newline-delimited
    // contexts — confirm against the bracket query that sets it.
    pub newline_only: bool,
    /// The depth of the syntax layer the brackets were found in.
    pub syntax_layer_depth: usize,
    /// Index used for bracket colorization, if any.
    pub color_index: Option<usize>,
}
910
911impl<T> BracketMatch<T> {
912 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
913 (self.open_range, self.close_range)
914 }
915}
916
917impl Buffer {
918 /// Create a new buffer with the given base text.
919 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
920 Self::build(
921 TextBuffer::new(
922 ReplicaId::LOCAL,
923 cx.entity_id().as_non_zero_u64().into(),
924 base_text.into(),
925 ),
926 None,
927 Capability::ReadWrite,
928 )
929 }
930
931 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
932 pub fn local_normalized(
933 base_text_normalized: Rope,
934 line_ending: LineEnding,
935 cx: &Context<Self>,
936 ) -> Self {
937 Self::build(
938 TextBuffer::new_normalized(
939 ReplicaId::LOCAL,
940 cx.entity_id().as_non_zero_u64().into(),
941 line_ending,
942 base_text_normalized,
943 ),
944 None,
945 Capability::ReadWrite,
946 )
947 }
948
949 /// Create a new buffer that is a replica of a remote buffer.
950 pub fn remote(
951 remote_id: BufferId,
952 replica_id: ReplicaId,
953 capability: Capability,
954 base_text: impl Into<String>,
955 ) -> Self {
956 Self::build(
957 TextBuffer::new(replica_id, remote_id, base_text.into()),
958 None,
959 capability,
960 )
961 }
962
    /// Create a new buffer that is a replica of a remote buffer, populating its
    /// state from the given protobuf message.
    ///
    /// Returns an error when the message's buffer id or line ending cannot be
    /// deserialized.
    pub fn from_proto(
        replica_id: ReplicaId,
        capability: Capability,
        message: proto::BufferState,
        file: Option<Arc<dyn File>>,
    ) -> Result<Self> {
        let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
        let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
        let mut this = Self::build(buffer, file, capability);
        this.text.set_line_ending(proto::deserialize_line_ending(
            rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
        ));
        // Restore the on-disk bookkeeping so dirty/conflict detection behaves
        // the same as on the originating replica.
        this.saved_version = proto::deserialize_version(&message.saved_version);
        this.saved_mtime = message.saved_mtime.map(|time| time.into());
        Ok(this)
    }
981
    /// Serialize the buffer's state to a protobuf message.
    ///
    /// Captures the base text, line ending, and saved-version/mtime
    /// bookkeeping; individual operations are serialized separately via
    /// [`Buffer::serialize_ops`].
    pub fn to_proto(&self, cx: &App) -> proto::BufferState {
        proto::BufferState {
            id: self.remote_id().into(),
            file: self.file.as_ref().map(|f| f.to_proto(cx)),
            base_text: self.base_text().to_string(),
            line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
            saved_version: proto::serialize_version(&self.saved_version),
            saved_mtime: self.saved_mtime.map(|time| time.into()),
        }
    }
993
    /// Serialize as protobufs all of the changes to the buffer since the given version.
    ///
    /// Non-text state (deferred operations, remote selections, diagnostics, and
    /// per-server completion triggers) is serialized eagerly on the calling
    /// thread; the text operations are filtered against `since` and serialized
    /// on a background task. The result is sorted by lamport timestamp.
    pub fn serialize_ops(
        &self,
        since: Option<clock::Global>,
        cx: &App,
    ) -> Task<Vec<proto::Operation>> {
        let mut operations = Vec::new();
        // Operations that arrived out of order and have not been applied yet.
        operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));

        operations.extend(self.remote_selections.iter().map(|(_, set)| {
            proto::serialize_operation(&Operation::UpdateSelections {
                selections: set.selections.clone(),
                lamport_timestamp: set.lamport_timestamp,
                line_mode: set.line_mode,
                cursor_shape: set.cursor_shape,
            })
        }));

        // One diagnostics operation per language server.
        for (server_id, diagnostics) in self.diagnostics.iter() {
            operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
                lamport_timestamp: self.diagnostics_timestamp,
                server_id: *server_id,
                diagnostics: diagnostics.iter().cloned().collect(),
            }));
        }

        for (server_id, completions) in &self.completion_triggers_per_language_server {
            operations.push(proto::serialize_operation(
                &Operation::UpdateCompletionTriggers {
                    triggers: completions.iter().cloned().collect(),
                    lamport_timestamp: self.completion_triggers_timestamp,
                    server_id: *server_id,
                },
            ));
        }

        let text_operations = self.text.operations().clone();
        cx.background_spawn(async move {
            // `None` means "send everything": the default (empty) version has
            // observed no operations, so no text op is filtered out.
            let since = since.unwrap_or_default();
            operations.extend(
                text_operations
                    .iter()
                    .filter(|(_, op)| !since.observed(op.timestamp()))
                    .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
            );
            operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
            operations
        })
    }
1043
    /// Assign a language to the buffer, returning the buffer.
    ///
    /// The reparse triggered by the language change only blocks when running
    /// in tests; see [`Self::set_language_async`].
    pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language_async(Some(language), cx);
        self
    }
1049
    /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
    ///
    /// Builder-style wrapper around [`Self::set_language`].
    #[ztracing::instrument(skip_all, fields(lang = language.config.name.0.as_str()))]
    pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
        self.set_language(Some(language), cx);
        self
    }
1056
    /// Returns the [`Capability`] of this buffer, which determines whether it
    /// can be edited (see [`Self::read_only`]).
    pub fn capability(&self) -> Capability {
        self.capability
    }
1061
    /// Whether this buffer can only be read, i.e. its capability does not
    /// permit editing.
    pub fn read_only(&self) -> bool {
        !self.capability.editable()
    }
1066
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
    ///
    /// All non-text state starts out empty/idle; the saved version and mtime
    /// are initialized from the text buffer and file's current on-disk state.
    pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
        // The saved mtime comes from the file's on-disk state, when present.
        let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
        let snapshot = buffer.snapshot();
        let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
        let tree_sitter_data = TreeSitterData::new(snapshot);
        Self {
            saved_mtime,
            tree_sitter_data: Arc::new(tree_sitter_data),
            saved_version: buffer.version(),
            preview_version: buffer.version(),
            reload_task: None,
            transaction_depth: 0,
            was_dirty_before_starting_transaction: None,
            // (version, has unsaved edits) — starts clean at the initial version.
            has_unsaved_edits: Cell::new((buffer.version(), false)),
            text: buffer,

            file,
            capability,
            syntax_map,
            reparse: None,
            non_text_state_update_count: 0,
            // Synchronous parse budget: 10ms under test, 1ms in production.
            sync_parse_timeout: if cfg!(any(test, feature = "test-support")) {
                Some(Duration::from_millis(10))
            } else {
                Some(Duration::from_millis(1))
            },
            parse_status: watch::channel(ParseStatus::Idle),
            autoindent_requests: Default::default(),
            wait_for_autoindent_txs: Default::default(),
            pending_autoindent: Default::default(),
            language: None,
            remote_selections: Default::default(),
            diagnostics: Default::default(),
            diagnostics_timestamp: Lamport::MIN,
            completion_triggers: Default::default(),
            completion_triggers_per_language_server: Default::default(),
            completion_triggers_timestamp: Lamport::MIN,
            deferred_ops: OperationQueue::new(),
            has_conflict: false,
            change_bits: Default::default(),
            modeline: None,
            _subscriptions: Vec::new(),
            // New buffers default to UTF-8 without a byte order mark.
            encoding: encoding_rs::UTF_8,
            has_bom: false,
            reload_with_encoding_txns: HashMap::default(),
        }
    }
1115
    /// Builds a standalone, read-only [`BufferSnapshot`] for the given text.
    ///
    /// Reserves a fresh entity id on the main thread (so the snapshot's buffer
    /// id is unique), then normalizes the text and, if a language is provided,
    /// parses it when the returned future is awaited.
    #[ztracing::instrument(skip_all)]
    pub fn build_snapshot(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        modeline: Option<Arc<ModelineSettings>>,
        cx: &mut App,
    ) -> impl Future<Output = BufferSnapshot> + use<> {
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        async move {
            let text =
                TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text);
            let text = text.into_snapshot();
            let mut syntax = SyntaxMap::new(&text).snapshot();
            if let Some(language) = language.clone() {
                let language_registry = language_registry.clone();
                syntax.reparse(&text, language_registry, language);
            }
            let tree_sitter_data = TreeSitterData::new(&text);
            BufferSnapshot {
                text,
                syntax,
                file: None,
                diagnostics: Default::default(),
                remote_selections: Default::default(),
                tree_sitter_data: Arc::new(tree_sitter_data),
                language,
                non_text_state_update_count: 0,
                capability: Capability::ReadOnly,
                modeline,
            }
        }
    }
1150
1151 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1152 let entity_id = cx.reserve_entity::<Self>().entity_id();
1153 let buffer_id = entity_id.as_non_zero_u64().into();
1154 let text = TextBuffer::new_normalized(
1155 ReplicaId::LOCAL,
1156 buffer_id,
1157 Default::default(),
1158 Rope::new(),
1159 );
1160 let text = text.into_snapshot();
1161 let syntax = SyntaxMap::new(&text).snapshot();
1162 let tree_sitter_data = TreeSitterData::new(&text);
1163 BufferSnapshot {
1164 text,
1165 syntax,
1166 tree_sitter_data: Arc::new(tree_sitter_data),
1167 file: None,
1168 diagnostics: Default::default(),
1169 remote_selections: Default::default(),
1170 language: None,
1171 non_text_state_update_count: 0,
1172 capability: Capability::ReadOnly,
1173 modeline: None,
1174 }
1175 }
1176
    /// Test-only synchronous variant of [`Self::build_snapshot`]: builds and
    /// (if a language is given) parses the snapshot on the calling thread.
    #[cfg(any(test, feature = "test-support"))]
    pub fn build_snapshot_sync(
        text: Rope,
        language: Option<Arc<Language>>,
        language_registry: Option<Arc<LanguageRegistry>>,
        cx: &mut App,
    ) -> BufferSnapshot {
        // Reserve a fresh entity id so the snapshot's buffer id is unique.
        let entity_id = cx.reserve_entity::<Self>().entity_id();
        let buffer_id = entity_id.as_non_zero_u64().into();
        let text =
            TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
                .into_snapshot();
        let mut syntax = SyntaxMap::new(&text).snapshot();
        if let Some(language) = language.clone() {
            syntax.reparse(&text, language_registry, language);
        }
        let tree_sitter_data = TreeSitterData::new(&text);
        BufferSnapshot {
            text,
            syntax,
            tree_sitter_data: Arc::new(tree_sitter_data),
            file: None,
            diagnostics: Default::default(),
            remote_selections: Default::default(),
            language,
            non_text_state_update_count: 0,
            capability: Capability::ReadOnly,
            modeline: None,
        }
    }
1207
    /// Retrieve a snapshot of the buffer's current state. This is computationally
    /// cheap, and allows reading from the buffer on a background thread.
    pub fn snapshot(&self) -> BufferSnapshot {
        let text = self.text.snapshot();

        // Bring the syntax tree up to date with any edits made since the last
        // reparse before snapshotting it.
        let syntax = {
            let mut syntax_map = self.syntax_map.lock();
            syntax_map.interpolate(text);
            syntax_map.snapshot()
        };

        // Reuse the cached tree-sitter data when it still matches the current
        // text version; otherwise rebuild it for this snapshot.
        let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
            Arc::new(TreeSitterData::new(text))
        } else {
            self.tree_sitter_data.clone()
        };

        BufferSnapshot {
            text: text.clone(),
            syntax,
            tree_sitter_data,
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
            capability: self.capability,
            modeline: self.modeline.clone(),
        }
    }
1238
    /// Computes an [`EditPreview`] for applying `edits` to this buffer on a
    /// background thread: snapshots of the text before and after the edits,
    /// plus a syntax snapshot for the edited text.
    #[ztracing::instrument(skip_all)]
    pub fn preview_edits(
        &self,
        edits: Arc<[(Range<Anchor>, Arc<str>)]>,
        cx: &App,
    ) -> Task<EditPreview> {
        let registry = self.language_registry();
        let language = self.language().cloned();
        let old_snapshot = self.text.snapshot().clone();
        let new_snapshot = self.text.snapshot_with_edits(edits.iter().cloned());
        let mut syntax_snapshot = self.syntax_map.lock().snapshot();
        cx.background_spawn(async move {
            if !edits.is_empty() {
                // Parse the pre-edit text first so the interpolation below
                // starts from an up-to-date tree.
                if let Some(language) = language.clone() {
                    syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
                }

                syntax_snapshot.interpolate(&new_snapshot);

                if let Some(language) = language {
                    syntax_snapshot.reparse(&new_snapshot, registry, language);
                }
            }
            EditPreview {
                old_snapshot,
                applied_edits_snapshot: new_snapshot,
                syntax_snapshot,
            }
        })
    }
1269
    /// Borrows the buffer's current text snapshot without cloning it
    /// (compare [`Self::text_snapshot`], which returns an owned copy).
    pub fn as_text_snapshot(&self) -> &text::BufferSnapshot {
        &self.text
    }
1273
    /// Retrieve a snapshot of the buffer's raw text, without any
    /// language-related state like the syntax tree or diagnostics.
    ///
    /// Returns an owned clone; callers that only need a borrow can use
    /// [`Self::as_text_snapshot`] instead.
    #[ztracing::instrument(skip_all)]
    pub fn text_snapshot(&self) -> text::BufferSnapshot {
        // todo lw
        self.text.snapshot().clone()
    }
1281
    /// The file associated with the buffer, if any.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }
1286
    /// The version of the buffer that was last saved or reloaded from disk.
    pub fn saved_version(&self) -> &clock::Global {
        &self.saved_version
    }
1291
    /// The mtime of the buffer's file when the buffer was last saved or
    /// reloaded from disk; `None` if the buffer has no file on disk.
    pub fn saved_mtime(&self) -> Option<MTime> {
        self.saved_mtime
    }
1296
    /// Returns the character encoding of the buffer's file
    /// (defaults to UTF-8; see [`Self::build`]).
    pub fn encoding(&self) -> &'static Encoding {
        self.encoding
    }
1301
    /// Sets the character encoding of the buffer. This only records the
    /// encoding; it does not re-decode the buffer's contents.
    pub fn set_encoding(&mut self, encoding: &'static Encoding) {
        self.encoding = encoding;
    }
1306
    /// Returns whether the buffer's file has a Byte Order Mark.
    pub fn has_bom(&self) -> bool {
        self.has_bom
    }
1311
    /// Sets whether the buffer has a Byte Order Mark. This only records the
    /// flag; the buffer's text is not modified.
    pub fn set_has_bom(&mut self, has_bom: bool) {
        self.has_bom = has_bom;
    }
1316
    /// Assign a language to the buffer.
    ///
    /// The resulting reparse only blocks the current thread when running under
    /// test; in production it is fully asynchronous.
    pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
    }
1321
    /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
    pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
        self.set_language_(language, true, cx);
    }
1326
    /// Shared implementation of [`Self::set_language`] and
    /// [`Self::set_language_async`]. `may_block` controls whether the reparse
    /// may briefly block the current thread.
    #[ztracing::instrument(skip_all)]
    fn set_language_(
        &mut self,
        language: Option<Arc<Language>>,
        may_block: bool,
        cx: &mut Context<Self>,
    ) {
        if language == self.language {
            return;
        }
        self.non_text_state_update_count += 1;
        // Existing syntax layers were built for the old language; discard them.
        self.syntax_map.lock().clear(&self.text);
        let old_language = std::mem::replace(&mut self.language, language);
        self.was_changed();
        self.reparse(cx, may_block);
        // "Fresh" means going from no language (or plain text) to a real one.
        let has_fresh_language =
            self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
        cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
    }
1346
    /// Assign a language registry to the buffer. This allows the buffer to retrieve
    /// other languages if parts of the buffer are written in different languages.
    ///
    /// Takes `&self` because the registry lives inside the mutex-protected
    /// syntax map.
    pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
        self.syntax_map
            .lock()
            .set_language_registry(language_registry);
    }
1354
    /// Returns the language registry assigned to the buffer's syntax map, if any.
    pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
        self.syntax_map.lock().language_registry()
    }
1358
    /// Assign the line ending type to the buffer.
    ///
    /// Broadcasts the change to collaborators as an `UpdateLineEnding`
    /// operation stamped with a fresh lamport timestamp.
    pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
        self.text.set_line_ending(line_ending);

        let lamport_timestamp = self.text.lamport_clock.tick();
        self.send_operation(
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            },
            true,
            cx,
        );
    }
1373
1374 /// Assign the buffer [`ModelineSettings`].
1375 pub fn set_modeline(&mut self, modeline: Option<ModelineSettings>) -> bool {
1376 if modeline.as_ref() != self.modeline.as_deref() {
1377 self.modeline = modeline.map(Arc::new);
1378 true
1379 } else {
1380 false
1381 }
1382 }
1383
    /// Returns the buffer's [`ModelineSettings`], if any have been assigned.
    pub fn modeline(&self) -> Option<&Arc<ModelineSettings>> {
        self.modeline.as_ref()
    }
1388
1389 /// Assign the buffer a new [`Capability`].
1390 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1391 if self.capability != capability {
1392 self.capability = capability;
1393 cx.emit(BufferEvent::CapabilityChanged)
1394 }
1395 }
1396
    /// This method is called to signal that the buffer has been saved.
    ///
    /// Records the saved version and mtime, clears conflict state, and emits
    /// [`BufferEvent::Saved`].
    pub fn did_save(
        &mut self,
        version: clock::Global,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version.clone();
        // The buffer is clean as of this version.
        self.has_unsaved_edits.set((version, false));
        self.has_conflict = false;
        self.saved_mtime = mtime;
        self.was_changed();
        cx.emit(BufferEvent::Saved);
        cx.notify();
    }
1412
    /// Reloads the contents of the buffer from disk, using the buffer's
    /// current encoding. The receiver resolves with the reload transaction,
    /// if one was applied.
    pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
        self.reload_impl(None, cx)
    }
1417
    /// Reloads the contents of the buffer from disk using the specified encoding.
    ///
    /// This bypasses automatic encoding detection heuristics (like BOM checks) for non-Unicode encodings,
    /// allowing users to force a specific interpretation of the bytes.
    pub fn reload_with_encoding(
        &mut self,
        encoding: &'static Encoding,
        cx: &Context<Self>,
    ) -> oneshot::Receiver<Option<Transaction>> {
        self.reload_impl(Some(encoding), cx)
    }
1429
    /// Shared implementation of [`Self::reload`] and
    /// [`Self::reload_with_encoding`]: loads the file's bytes, decodes them,
    /// diffs against the buffer, and applies the diff unless the buffer was
    /// edited concurrently (in which case a conflict may be recorded).
    fn reload_impl(
        &mut self,
        force_encoding: Option<&'static Encoding>,
        cx: &Context<Self>,
    ) -> oneshot::Receiver<Option<Transaction>> {
        let (tx, rx) = futures::channel::oneshot::channel();
        let prev_version = self.text.version();

        self.reload_task = Some(cx.spawn(async move |this, cx| {
            // Only buffers backed by a local file can be reloaded from disk.
            let Some((new_mtime, load_bytes_task, current_encoding)) =
                this.update(cx, |this, cx| {
                    let file = this.file.as_ref()?.as_local()?;
                    Some((
                        file.disk_state().mtime(),
                        file.load_bytes(cx),
                        this.encoding,
                    ))
                })?
            else {
                return Ok(());
            };

            let target_encoding = force_encoding.unwrap_or(current_encoding);

            let is_unicode = target_encoding == encoding_rs::UTF_8
                || target_encoding == encoding_rs::UTF_16LE
                || target_encoding == encoding_rs::UTF_16BE;

            // For a forced non-Unicode encoding, skip BOM handling and decode
            // the bytes exactly as requested.
            let (new_text, has_bom, encoding_used) = if force_encoding.is_some() && !is_unicode {
                let bytes = load_bytes_task.await?;
                let (cow, _had_errors) = target_encoding.decode_without_bom_handling(&bytes);
                (cow.into_owned(), false, target_encoding)
            } else {
                let bytes = load_bytes_task.await?;
                // `decode` performs BOM sniffing and may select a different
                // encoding than requested; record which one was actually used.
                let (cow, used_enc, _had_errors) = target_encoding.decode(&bytes);

                let actual_has_bom = if used_enc == encoding_rs::UTF_8 {
                    bytes.starts_with(&[0xEF, 0xBB, 0xBF])
                } else if used_enc == encoding_rs::UTF_16LE {
                    bytes.starts_with(&[0xFF, 0xFE])
                } else if used_enc == encoding_rs::UTF_16BE {
                    bytes.starts_with(&[0xFE, 0xFF])
                } else {
                    false
                };
                (cow.into_owned(), actual_has_bom, used_enc)
            };

            let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
            this.update(cx, |this, cx| {
                if this.version() == diff.base_version {
                    // No edits arrived while diffing; apply the reload as its
                    // own transaction.
                    this.finalize_last_transaction();
                    let old_encoding = this.encoding;
                    let old_has_bom = this.has_bom;
                    this.apply_diff(diff, cx);
                    this.encoding = encoding_used;
                    this.has_bom = has_bom;
                    let transaction = this.finalize_last_transaction().cloned();
                    if let Some(ref txn) = transaction {
                        // Record the pre-reload encoding/BOM keyed by this
                        // transaction's id.
                        if old_encoding != encoding_used || old_has_bom != has_bom {
                            this.reload_with_encoding_txns
                                .insert(txn.id, (old_encoding, old_has_bom));
                        }
                    }
                    tx.send(transaction).ok();
                    this.has_conflict = false;
                    this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
                } else {
                    // The buffer changed while reloading; flag a conflict only
                    // if buffer and disk genuinely diverged.
                    if !diff.edits.is_empty()
                        || this
                            .edits_since::<usize>(&diff.base_version)
                            .next()
                            .is_some()
                    {
                        this.has_conflict = true;
                    }

                    this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
                }

                this.reload_task.take();
            })
        }));
        rx
    }
1515
    /// This method is called to signal that the buffer has been reloaded.
    ///
    /// Records the reloaded version, line ending, and mtime, and emits
    /// [`BufferEvent::Reloaded`].
    pub fn did_reload(
        &mut self,
        version: clock::Global,
        line_ending: LineEnding,
        mtime: Option<MTime>,
        cx: &mut Context<Self>,
    ) {
        self.saved_version = version;
        // The buffer is clean as of the reloaded version.
        self.has_unsaved_edits
            .set((self.saved_version.clone(), false));
        self.text.set_line_ending(line_ending);
        self.saved_mtime = mtime;
        cx.emit(BufferEvent::Reloaded);
        cx.notify();
    }
1532
    /// Updates the [`File`] backing this buffer. This should be called when
    /// the file has changed or has been deleted.
    ///
    /// Emits `ReloadNeeded` when a clean buffer's file changed on disk, plus
    /// `FileHandleChanged` (and `DirtyChanged` when applicable) for any change.
    pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
        let was_dirty = self.is_dirty();
        let mut file_changed = false;

        if let Some(old_file) = self.file.as_ref() {
            if new_file.path() != old_file.path() {
                file_changed = true;
            }

            let old_state = old_file.disk_state();
            let new_state = new_file.disk_state();
            if old_state != new_state {
                file_changed = true;
                // Only prompt a reload when the buffer has no unsaved edits
                // and the file still exists on disk.
                if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
                    cx.emit(BufferEvent::ReloadNeeded)
                }
            }
        } else {
            // The buffer had no file before; treat gaining one as a change.
            file_changed = true;
        };

        self.file = Some(new_file);
        if file_changed {
            self.was_changed();
            self.non_text_state_update_count += 1;
            if was_dirty != self.is_dirty() {
                cx.emit(BufferEvent::DirtyChanged);
            }
            cx.emit(BufferEvent::FileHandleChanged);
            cx.notify();
        }
    }
1567
    /// Returns the primary [`Language`] assigned to this [`Buffer`].
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }
1572
    /// Returns the [`Language`] at the given location.
    ///
    /// Uses the last matching syntax layer whose sub-ranges (for combined
    /// injections) contain the offset, falling back to the buffer's primary
    /// language.
    pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
        let offset = position.to_offset(self);
        let text: &TextBufferSnapshot = &self.text;
        self.syntax_map
            .lock()
            .layers_for_range(offset..offset, text, false)
            .filter(|layer| {
                // For combined injections, check whether the offset falls
                // within the layer's actual sub-ranges.
                layer
                    .included_sub_ranges
                    .is_none_or(|ranges| offset_in_sub_ranges(ranges, offset, text))
            })
            .last()
            .map(|info| info.language.clone())
            .or_else(|| self.language.clone())
    }
1589
    /// Returns each [`Language`] for the active syntax layers at the given location.
    ///
    /// When no syntax layer matches, falls back to a single-element vector
    /// containing the buffer's primary language (if any).
    pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
        let offset = position.to_offset(self);
        let text: &TextBufferSnapshot = &self.text;
        let mut languages: Vec<Arc<Language>> = self
            .syntax_map
            .lock()
            .layers_for_range(offset..offset, text, false)
            .filter(|layer| {
                // For combined injections, check if offset is within the actual sub-ranges.
                layer
                    .included_sub_ranges
                    .is_none_or(|ranges| offset_in_sub_ranges(ranges, offset, text))
            })
            .map(|info| info.language.clone())
            .collect();

        if languages.is_empty()
            && let Some(buffer_language) = self.language()
        {
            languages.push(buffer_language.clone());
        }

        languages
    }
1615
    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }
1621
    /// Whether the buffer is being parsed in the background
    /// (i.e. a reparse task is currently in flight).
    #[cfg(any(test, feature = "test-support"))]
    pub fn is_parsing(&self) -> bool {
        self.reparse.is_some()
    }
1627
    /// Indicates whether the buffer contains any regions that may be
    /// written in a language that hasn't been loaded yet.
    pub fn contains_unknown_injections(&self) -> bool {
        self.syntax_map.lock().contains_unknown_injections()
    }
1633
    /// Sets the sync parse timeout for this buffer, i.e. how long
    /// [`Self::reparse`] may block waiting for a synchronous parse.
    ///
    /// Setting this to `None` disables sync parsing entirely.
    pub fn set_sync_parse_timeout(&mut self, timeout: Option<Duration>) {
        self.sync_parse_timeout = timeout;
    }
1640
1641 fn invalidate_tree_sitter_data(
1642 tree_sitter_data: &mut Arc<TreeSitterData>,
1643 snapshot: &text::BufferSnapshot,
1644 ) {
1645 match Arc::get_mut(tree_sitter_data) {
1646 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1647 None => {
1648 let new_tree_sitter_data = TreeSitterData::new(snapshot);
1649 *tree_sitter_data = Arc::new(new_tree_sitter_data)
1650 }
1651 }
1652 }
1653
    /// Called after an edit to synchronize the buffer's main parse tree with
    /// the buffer's new underlying state.
    ///
    /// Locks the syntax map and interpolates the edits since the last reparse
    /// into the foreground syntax tree.
    ///
    /// Then takes a stable snapshot of the syntax map before unlocking it.
    /// The snapshot with the interpolated edits is sent to a background thread,
    /// where we ask Tree-sitter to perform an incremental parse.
    ///
    /// Meanwhile, in the foreground if `may_block` is true, we block the main
    /// thread for up to 1ms waiting on the parse to complete. As soon as it
    /// completes, we proceed synchronously, unless a 1ms timeout elapses.
    ///
    /// If we time out waiting on the parse, we spawn a second task waiting
    /// until the parse does complete and return with the interpolated tree still
    /// in the foreground. When the background parse completes, call back into
    /// the main thread and assign the foreground parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// initiate an additional reparse recursively. To avoid concurrent parses
    /// for the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
    #[ztracing::instrument(skip_all)]
    pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
        // Drop/rebuild cached tree-sitter data if the text has moved on.
        if self.text.version() != *self.tree_sitter_data.version() {
            Self::invalidate_tree_sitter_data(&mut self.tree_sitter_data, self.text.snapshot());
        }
        // A background parse is already in flight; it re-triggers if needed.
        if self.reparse.is_some() {
            return;
        }
        let language = if let Some(language) = self.language.clone() {
            language
        } else {
            return;
        };

        let text = self.text_snapshot();
        let parsed_version = self.version();

        // Bring the syntax map up to date with the latest edits before
        // snapshotting it for the background parse.
        let mut syntax_map = self.syntax_map.lock();
        syntax_map.interpolate(&text);
        let language_registry = syntax_map.language_registry();
        let mut syntax_snapshot = syntax_map.snapshot();
        drop(syntax_map);

        self.parse_status.0.send(ParseStatus::Parsing).unwrap();
        // First try a bounded synchronous parse so small edits finish
        // before returning.
        if may_block && let Some(sync_parse_timeout) = self.sync_parse_timeout {
            if let Ok(()) = syntax_snapshot.reparse_with_timeout(
                &text,
                language_registry.clone(),
                language.clone(),
                sync_parse_timeout,
            ) {
                self.did_finish_parsing(syntax_snapshot, Some(Duration::from_millis(300)), cx);
                self.reparse = None;
                return;
            }
        }

        let parse_task = cx.background_spawn({
            let language = language.clone();
            let language_registry = language_registry.clone();
            async move {
                syntax_snapshot.reparse(&text, language_registry, language);
                syntax_snapshot
            }
        });

        self.reparse = Some(cx.spawn(async move |this, cx| {
            let new_syntax_map = parse_task.await;
            this.update(cx, move |this, cx| {
                let grammar_changed = || {
                    this.language
                        .as_ref()
                        .is_none_or(|current_language| !Arc::ptr_eq(&language, current_language))
                };
                let language_registry_changed = || {
                    new_syntax_map.contains_unknown_injections()
                        && language_registry.is_some_and(|registry| {
                            registry.version() != new_syntax_map.language_registry_version()
                        })
                };
                // Reparse if the text, language, or registry changed while
                // the background parse was running.
                let parse_again = this.version.changed_since(&parsed_version)
                    || language_registry_changed()
                    || grammar_changed();
                this.did_finish_parsing(new_syntax_map, None, cx);
                this.reparse = None;
                if parse_again {
                    this.reparse(cx, false);
                }
            })
            .ok();
        }));
    }
1749
    /// Commits a completed parse: stores the new syntax snapshot, requests
    /// autoindent (blocking up to `block_budget` when given), refreshes the
    /// cached tree-sitter data, and notifies observers.
    fn did_finish_parsing(
        &mut self,
        syntax_snapshot: SyntaxSnapshot,
        block_budget: Option<Duration>,
        cx: &mut Context<Self>,
    ) {
        self.non_text_state_update_count += 1;
        self.syntax_map.lock().did_parse(syntax_snapshot);
        self.was_changed();
        self.request_autoindent(cx, block_budget);
        self.parse_status.0.send(ParseStatus::Idle).unwrap();
        Self::invalidate_tree_sitter_data(&mut self.tree_sitter_data, &self.text.snapshot());
        cx.emit(BufferEvent::Reparsed);
        cx.notify();
    }
1765
    /// Returns a watch receiver for observing the buffer's [`ParseStatus`].
    pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
        self.parse_status.1.clone()
    }
1769
1770 /// Wait until the buffer is no longer parsing
1771 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1772 let mut parse_status = self.parse_status();
1773 async move {
1774 while *parse_status.borrow() != ParseStatus::Idle {
1775 if parse_status.changed().await.is_err() {
1776 break;
1777 }
1778 }
1779 }
1780 }
1781
    /// Assign to the buffer a set of diagnostics created by a given language server.
    ///
    /// Applies the update locally and broadcasts it to collaborators as an
    /// `UpdateDiagnostics` operation with a fresh lamport timestamp.
    pub fn update_diagnostics(
        &mut self,
        server_id: LanguageServerId,
        diagnostics: DiagnosticSet,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        let op = Operation::UpdateDiagnostics {
            server_id,
            diagnostics: diagnostics.iter().cloned().collect(),
            lamport_timestamp,
        };

        self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
        self.send_operation(op, true, cx);
    }
1799
1800 pub fn buffer_diagnostics(
1801 &self,
1802 for_server: Option<LanguageServerId>,
1803 ) -> Vec<&DiagnosticEntry<Anchor>> {
1804 match for_server {
1805 Some(server_id) => self
1806 .diagnostics
1807 .get(&server_id)
1808 .map_or_else(Vec::new, |diagnostics| diagnostics.iter().collect()),
1809 None => self
1810 .diagnostics
1811 .iter()
1812 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1813 .collect(),
1814 }
1815 }
1816
    /// Kicks off computation of autoindent sizes for any pending requests.
    ///
    /// With a `block_budget`, blocks the foreground executor up to that long
    /// waiting for the computation, falling back to a spawned task on timeout.
    /// With no budget, always applies the indents asynchronously. When there
    /// is nothing to compute, drains pending requests and waiters.
    fn request_autoindent(&mut self, cx: &mut Context<Self>, block_budget: Option<Duration>) {
        if let Some(indent_sizes) = self.compute_autoindents() {
            let indent_sizes = cx.background_spawn(indent_sizes);
            let Some(block_budget) = block_budget else {
                // No budget: apply the indents asynchronously.
                self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
                    let indent_sizes = indent_sizes.await;
                    this.update(cx, |this, cx| {
                        this.apply_autoindents(indent_sizes, cx);
                    })
                    .ok();
                }));
                return;
            };
            match cx
                .foreground_executor()
                .block_with_timeout(block_budget, indent_sizes)
            {
                Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
                Err(indent_sizes) => {
                    // Timed out: finish waiting in a spawned task.
                    self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
                        let indent_sizes = indent_sizes.await;
                        this.update(cx, |this, cx| {
                            this.apply_autoindents(indent_sizes, cx);
                        })
                        .ok();
                    }));
                }
            }
        } else {
            // Nothing to compute: clear requests and wake anyone waiting for
            // autoindent to settle.
            self.autoindent_requests.clear();
            for tx in self.wait_for_autoindent_txs.drain(..) {
                tx.send(()).ok();
            }
        }
    }
1852
1853 fn compute_autoindents(
1854 &self,
1855 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1856 let max_rows_between_yields = 100;
1857 let snapshot = self.snapshot();
1858 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1859 return None;
1860 }
1861
1862 let autoindent_requests = self.autoindent_requests.clone();
1863 Some(async move {
1864 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1865 for request in autoindent_requests {
1866 // Resolve each edited range to its row in the current buffer and in the
1867 // buffer before this batch of edits.
1868 let mut row_ranges = Vec::new();
1869 let mut old_to_new_rows = BTreeMap::new();
1870 let mut language_indent_sizes_by_new_row = Vec::new();
1871 for entry in &request.entries {
1872 let position = entry.range.start;
1873 let new_row = position.to_point(&snapshot).row;
1874 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1875 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1876
1877 if let Some(old_row) = entry.old_row {
1878 old_to_new_rows.insert(old_row, new_row);
1879 }
1880 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1881 }
1882
1883 // Build a map containing the suggested indentation for each of the edited lines
1884 // with respect to the state of the buffer before these edits. This map is keyed
1885 // by the rows for these lines in the current state of the buffer.
1886 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1887 let old_edited_ranges =
1888 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1889 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1890 let mut language_indent_size = IndentSize::default();
1891 for old_edited_range in old_edited_ranges {
1892 let suggestions = request
1893 .before_edit
1894 .suggest_autoindents(old_edited_range.clone())
1895 .into_iter()
1896 .flatten();
1897 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1898 if let Some(suggestion) = suggestion {
1899 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1900
1901 // Find the indent size based on the language for this row.
1902 while let Some((row, size)) = language_indent_sizes.peek() {
1903 if *row > new_row {
1904 break;
1905 }
1906 language_indent_size = *size;
1907 language_indent_sizes.next();
1908 }
1909
1910 let suggested_indent = old_to_new_rows
1911 .get(&suggestion.basis_row)
1912 .and_then(|from_row| {
1913 Some(old_suggestions.get(from_row).copied()?.0)
1914 })
1915 .unwrap_or_else(|| {
1916 request
1917 .before_edit
1918 .indent_size_for_line(suggestion.basis_row)
1919 })
1920 .with_delta(suggestion.delta, language_indent_size);
1921 old_suggestions
1922 .insert(new_row, (suggested_indent, suggestion.within_error));
1923 }
1924 }
1925 yield_now().await;
1926 }
1927
1928 // Compute new suggestions for each line, but only include them in the result
1929 // if they differ from the old suggestion for that line.
1930 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1931 let mut language_indent_size = IndentSize::default();
1932 for (row_range, original_indent_column) in row_ranges {
1933 let new_edited_row_range = if request.is_block_mode {
1934 row_range.start..row_range.start + 1
1935 } else {
1936 row_range.clone()
1937 };
1938
1939 let suggestions = snapshot
1940 .suggest_autoindents(new_edited_row_range.clone())
1941 .into_iter()
1942 .flatten();
1943 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
1944 if let Some(suggestion) = suggestion {
1945 // Find the indent size based on the language for this row.
1946 while let Some((row, size)) = language_indent_sizes.peek() {
1947 if *row > new_row {
1948 break;
1949 }
1950 language_indent_size = *size;
1951 language_indent_sizes.next();
1952 }
1953
1954 let suggested_indent = indent_sizes
1955 .get(&suggestion.basis_row)
1956 .copied()
1957 .map(|e| e.0)
1958 .unwrap_or_else(|| {
1959 snapshot.indent_size_for_line(suggestion.basis_row)
1960 })
1961 .with_delta(suggestion.delta, language_indent_size);
1962
1963 if old_suggestions.get(&new_row).is_none_or(
1964 |(old_indentation, was_within_error)| {
1965 suggested_indent != *old_indentation
1966 && (!suggestion.within_error || *was_within_error)
1967 },
1968 ) {
1969 indent_sizes.insert(
1970 new_row,
1971 (suggested_indent, request.ignore_empty_lines),
1972 );
1973 }
1974 }
1975 }
1976
1977 if let (true, Some(original_indent_column)) =
1978 (request.is_block_mode, original_indent_column)
1979 {
1980 let new_indent =
1981 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
1982 *indent
1983 } else {
1984 snapshot.indent_size_for_line(row_range.start)
1985 };
1986 let delta = new_indent.len as i64 - original_indent_column as i64;
1987 if delta != 0 {
1988 for row in row_range.skip(1) {
1989 indent_sizes.entry(row).or_insert_with(|| {
1990 let mut size = snapshot.indent_size_for_line(row);
1991 if size.kind == new_indent.kind {
1992 match delta.cmp(&0) {
1993 Ordering::Greater => size.len += delta as u32,
1994 Ordering::Less => {
1995 size.len = size.len.saturating_sub(-delta as u32)
1996 }
1997 Ordering::Equal => {}
1998 }
1999 }
2000 (size, request.ignore_empty_lines)
2001 });
2002 }
2003 }
2004 }
2005
2006 yield_now().await;
2007 }
2008 }
2009
2010 indent_sizes
2011 .into_iter()
2012 .filter_map(|(row, (indent, ignore_empty_lines))| {
2013 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2014 None
2015 } else {
2016 Some((row, indent))
2017 }
2018 })
2019 .collect()
2020 })
2021 }
2022
    /// Applies a computed set of per-row indentation sizes to the buffer.
    ///
    /// Clears all pending autoindent requests (and resolves any tasks waiting
    /// on them), then performs one minimal edit per row whose current indent
    /// differs from the requested one. Edits are applied in a single batch.
    fn apply_autoindents(
        &mut self,
        indent_sizes: BTreeMap<u32, IndentSize>,
        cx: &mut Context<Self>,
    ) {
        self.autoindent_requests.clear();
        // Wake anyone blocked in `wait_for_autoindent_applied`.
        for tx in self.wait_for_autoindent_txs.drain(..) {
            tx.send(()).ok();
        }

        let edits: Vec<_> = indent_sizes
            .into_iter()
            .filter_map(|(row, indent_size)| {
                let current_size = indent_size_for_line(self, row);
                // `None` when the row already has the requested indentation.
                Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
            })
            .collect();

        // Re-indenting should not cause a previewed buffer to lose its preview.
        let preserve_preview = self.preserve_preview();
        self.edit(edits, None, cx);
        if preserve_preview {
            self.refresh_preview();
        }
    }
2047
2048 /// Create a minimal edit that will cause the given row to be indented
2049 /// with the given size. After applying this edit, the length of the line
2050 /// will always be at least `new_size.len`.
2051 pub fn edit_for_indent_size_adjustment(
2052 row: u32,
2053 current_size: IndentSize,
2054 new_size: IndentSize,
2055 ) -> Option<(Range<Point>, String)> {
2056 if new_size.kind == current_size.kind {
2057 match new_size.len.cmp(¤t_size.len) {
2058 Ordering::Greater => {
2059 let point = Point::new(row, 0);
2060 Some((
2061 point..point,
2062 iter::repeat(new_size.char())
2063 .take((new_size.len - current_size.len) as usize)
2064 .collect::<String>(),
2065 ))
2066 }
2067
2068 Ordering::Less => Some((
2069 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2070 String::new(),
2071 )),
2072
2073 Ordering::Equal => None,
2074 }
2075 } else {
2076 Some((
2077 Point::new(row, 0)..Point::new(row, current_size.len),
2078 iter::repeat(new_size.char())
2079 .take(new_size.len as usize)
2080 .collect::<String>(),
2081 ))
2082 }
2083 }
2084
2085 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2086 /// and the given new text.
2087 pub fn diff<T>(&self, new_text: T, cx: &App) -> Task<Diff>
2088 where
2089 T: AsRef<str> + Send + 'static,
2090 {
2091 let old_text = self.as_rope().clone();
2092 let base_version = self.version();
2093 cx.background_spawn(async move {
2094 let old_text = old_text.to_string();
2095 let mut new_text = new_text.as_ref().to_owned();
2096 let line_ending = LineEnding::detect(&new_text);
2097 LineEnding::normalize(&mut new_text);
2098 let edits = text_diff(&old_text, &new_text);
2099 Diff {
2100 base_version,
2101 line_ending,
2102 edits,
2103 }
2104 })
2105 }
2106
2107 /// Spawns a background task that searches the buffer for any whitespace
2108 /// at the ends of a lines, and returns a `Diff` that removes that whitespace.
2109 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2110 let old_text = self.as_rope().clone();
2111 let line_ending = self.line_ending();
2112 let base_version = self.version();
2113 cx.background_spawn(async move {
2114 let ranges = trailing_whitespace_ranges(&old_text);
2115 let empty = Arc::<str>::from("");
2116 Diff {
2117 base_version,
2118 line_ending,
2119 edits: ranges
2120 .into_iter()
2121 .map(|range| (range, empty.clone()))
2122 .collect(),
2123 }
2124 })
2125 }
2126
    /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Skips if the buffer is empty.
    pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
        let len = self.len();
        if len == 0 {
            return;
        }
        // Walk chunks from the end of the buffer, tracking `offset` as the
        // position just after the last non-whitespace character seen so far.
        let mut offset = len;
        for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
            let non_whitespace_len = chunk
                .trim_end_matches(|c: char| c.is_ascii_whitespace())
                .len();
            offset -= chunk.len();
            offset += non_whitespace_len;
            if non_whitespace_len != 0 {
                // If the trailing whitespace is exactly one "\n", the buffer
                // already satisfies the invariant — nothing to do.
                if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
                    return;
                }
                break;
            }
        }
        // Replace all trailing whitespace with a single newline.
        self.edit([(offset..len, "\n")], None, cx);
    }
2150
    /// Applies a diff to the buffer. If the buffer has changed since the given diff was
    /// calculated, then adjust the diff to account for those changes, and discard any
    /// parts of the diff that conflict with those changes.
    ///
    /// Returns the id of the transaction containing the applied edits, if any
    /// edits were made.
    pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
        let snapshot = self.snapshot();
        // Edits made to the buffer since the diff's base version, in order.
        let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
        // Running offset adjustment accumulated from edits preceding each hunk.
        let mut delta = 0;
        let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
            while let Some(edit_since) = edits_since.peek() {
                // If the edit occurs after a diff hunk, then it does not
                // affect that hunk.
                if edit_since.old.start > range.end {
                    break;
                }
                // If the edit precedes the diff hunk, then adjust the hunk
                // to reflect the edit.
                else if edit_since.old.end < range.start {
                    delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
                    edits_since.next();
                }
                // If the edit intersects a diff hunk, then discard that hunk.
                else {
                    return None;
                }
            }

            // Shift the hunk by the net size change of all preceding edits.
            let start = (range.start as i64 + delta) as usize;
            let end = (range.end as i64 + delta) as usize;
            Some((start..end, new_text))
        });

        self.start_transaction();
        self.text.set_line_ending(diff.line_ending);
        self.edit(adjusted_edits, None, cx);
        self.end_transaction(cx)
    }
2187
    /// Returns whether the buffer has edits that are not reflected in its
    /// saved version.
    ///
    /// The result is memoized in a `Cell` keyed by the buffer version, so
    /// repeated calls without intervening edits avoid re-walking the history.
    pub fn has_unsaved_edits(&self) -> bool {
        // Take the cached (version, result) pair out of the cell.
        let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();

        if last_version == self.version {
            // Cache hit: restore the cell contents and return the cached answer.
            self.has_unsaved_edits
                .set((last_version, has_unsaved_edits));
            return has_unsaved_edits;
        }

        // Cache miss: recompute against the saved version and refresh the cell.
        let has_edits = self.has_edits_since(&self.saved_version);
        self.has_unsaved_edits
            .set((self.version.clone(), has_edits));
        has_edits
    }
2202
2203 /// Checks if the buffer has unsaved changes.
2204 pub fn is_dirty(&self) -> bool {
2205 if self.capability == Capability::ReadOnly {
2206 return false;
2207 }
2208 if self.has_conflict {
2209 return true;
2210 }
2211 match self.file.as_ref().map(|f| f.disk_state()) {
2212 Some(DiskState::New) | Some(DiskState::Deleted) => {
2213 !self.is_empty() && self.has_unsaved_edits()
2214 }
2215 _ => self.has_unsaved_edits(),
2216 }
2217 }
2218
    /// Marks the buffer as having a conflict regardless of current buffer state.
    ///
    /// The flag is consulted by [`Self::is_dirty`] and [`Self::has_conflict`].
    pub fn set_conflict(&mut self) {
        self.has_conflict = true;
    }
2223
2224 /// Checks if the buffer and its file have both changed since the buffer
2225 /// was last saved or reloaded.
2226 pub fn has_conflict(&self) -> bool {
2227 if self.has_conflict {
2228 return true;
2229 }
2230 let Some(file) = self.file.as_ref() else {
2231 return false;
2232 };
2233 match file.disk_state() {
2234 DiskState::New => false,
2235 DiskState::Present { mtime, .. } => match self.saved_mtime {
2236 Some(saved_mtime) => {
2237 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2238 }
2239 None => true,
2240 },
2241 DiskState::Deleted => false,
2242 DiskState::Historic { .. } => false,
2243 }
2244 }
2245
    /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
    ///
    /// Delegates to the underlying text buffer's subscription mechanism.
    pub fn subscribe(&mut self) -> Subscription<usize> {
        self.text.subscribe()
    }
2250
    /// Adds a bit to the list of bits that are set when the buffer's text changes.
    ///
    /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
    pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
        // Keep `change_bits` sorted by pointer identity and deduplicated:
        // only insert when the binary search misses.
        if let Err(ix) = self
            .change_bits
            .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
        {
            self.change_bits.insert(ix, bit);
        }
    }
2263
2264 /// Set the change bit for all "listeners".
2265 fn was_changed(&mut self) {
2266 self.change_bits.retain(|change_bit| {
2267 change_bit
2268 .upgrade()
2269 .inspect(|bit| {
2270 _ = bit.replace(true);
2271 })
2272 .is_some()
2273 });
2274 }
2275
    /// Starts a transaction, if one is not already in-progress. When undoing or
    /// redoing edits, all of the edits performed within a transaction are undone
    /// or redone together.
    pub fn start_transaction(&mut self) -> Option<TransactionId> {
        self.start_transaction_at(Instant::now())
    }
2282
    /// Starts a transaction, providing the current time. Subsequent transactions
    /// that occur within a short period of time will be grouped together. This
    /// is controlled by the buffer's undo grouping duration.
    pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
        self.transaction_depth += 1;
        // Record dirtiness at the start of the outermost transaction, so
        // `end_transaction_at` can detect a dirty-state transition.
        if self.was_dirty_before_starting_transaction.is_none() {
            self.was_dirty_before_starting_transaction = Some(self.is_dirty());
        }
        self.text.start_transaction_at(now)
    }
2293
    /// Terminates the current transaction, if this is the outermost transaction.
    pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
        self.end_transaction_at(Instant::now(), cx)
    }
2298
2299 /// Terminates the current transaction, providing the current time. Subsequent transactions
2300 /// that occur within a short period of time will be grouped together. This
2301 /// is controlled by the buffer's undo grouping duration.
2302 pub fn end_transaction_at(
2303 &mut self,
2304 now: Instant,
2305 cx: &mut Context<Self>,
2306 ) -> Option<TransactionId> {
2307 assert!(self.transaction_depth > 0);
2308 self.transaction_depth -= 1;
2309 let was_dirty = if self.transaction_depth == 0 {
2310 self.was_dirty_before_starting_transaction.take().unwrap()
2311 } else {
2312 false
2313 };
2314 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2315 self.did_edit(&start_version, was_dirty, true, cx);
2316 Some(transaction_id)
2317 } else {
2318 None
2319 }
2320 }
2321
    /// Manually add a transaction to the buffer's undo history.
    pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
        self.text.push_transaction(transaction, now);
    }
2326
    /// Differs from `push_transaction` in that it does not clear the redo
    /// stack. Intended to be used to create a parent transaction to merge
    /// potential child transactions into.
    ///
    /// The caller is responsible for removing it from the undo history using
    /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
    /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure the redo stack is
    /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merging the resulting transactions into
    /// the transaction created by this method.
    pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
        self.text.push_empty_transaction(now)
    }
2341
    /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
    pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
        self.text.finalize_last_transaction()
    }
2347
    /// Manually group all changes since a given transaction.
    pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
        self.text.group_until_transaction(transaction_id);
    }
2352
    /// Manually remove a transaction from the buffer's undo history.
    ///
    /// Returns the removed transaction, if it existed.
    pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
        self.text.forget_transaction(transaction_id)
    }
2357
    /// Retrieve a transaction from the buffer's undo history, if present.
    pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
        self.text.get_transaction(transaction_id)
    }
2362
    /// Manually merge two transactions in the buffer's undo history,
    /// folding `transaction` into `destination`.
    pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
        self.text.merge_transactions(transaction, destination);
    }
2367
    /// Waits for the buffer to receive operations with the given timestamps.
    pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
        &mut self,
        edit_ids: It,
    ) -> impl Future<Output = Result<()>> + use<It> {
        self.text.wait_for_edits(edit_ids)
    }
2375
    /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
    pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
        &mut self,
        anchors: It,
    ) -> impl 'static + Future<Output = Result<()>> + use<It> {
        self.text.wait_for_anchors(anchors)
    }
2383
    /// Waits for the buffer to receive operations up to the given version.
    pub fn wait_for_version(
        &mut self,
        version: clock::Global,
    ) -> impl Future<Output = Result<()>> + use<> {
        self.text.wait_for_version(version)
    }
2391
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
    pub fn give_up_waiting(&mut self) {
        self.text.give_up_waiting();
    }
2397
2398 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2399 let mut rx = None;
2400 if !self.autoindent_requests.is_empty() {
2401 let channel = oneshot::channel();
2402 self.wait_for_autoindent_txs.push(channel.0);
2403 rx = Some(channel.1);
2404 }
2405 rx
2406 }
2407
    /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
    pub fn set_active_selections(
        &mut self,
        selections: Arc<[Selection<Anchor>]>,
        line_mode: bool,
        cursor_shape: CursorShape,
        cx: &mut Context<Self>,
    ) {
        // Stamp the update so replicas can discard stale selection sets.
        let lamport_timestamp = self.text.lamport_clock.tick();
        // Record our own selections locally under this replica's id.
        self.remote_selections.insert(
            self.text.replica_id(),
            SelectionSet {
                selections: selections.clone(),
                lamport_timestamp,
                line_mode,
                cursor_shape,
            },
        );
        // Broadcast the same update to the other replicas.
        self.send_operation(
            Operation::UpdateSelections {
                selections,
                line_mode,
                lamport_timestamp,
                cursor_shape,
            },
            true,
            cx,
        );
        self.non_text_state_update_count += 1;
        cx.notify();
    }
2439
2440 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2441 /// this replica.
2442 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2443 if self
2444 .remote_selections
2445 .get(&self.text.replica_id())
2446 .is_none_or(|set| !set.selections.is_empty())
2447 {
2448 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2449 }
2450 }
2451
    /// Stores a set of selections attributed to the agent replica.
    ///
    /// Unlike [`Self::set_active_selections`], this only updates local state;
    /// no operation is broadcast to other replicas.
    pub fn set_agent_selections(
        &mut self,
        selections: Arc<[Selection<Anchor>]>,
        line_mode: bool,
        cursor_shape: CursorShape,
        cx: &mut Context<Self>,
    ) {
        let lamport_timestamp = self.text.lamport_clock.tick();
        self.remote_selections.insert(
            ReplicaId::AGENT,
            SelectionSet {
                selections,
                lamport_timestamp,
                line_mode,
                cursor_shape,
            },
        );
        self.non_text_state_update_count += 1;
        cx.notify();
    }
2472
    /// Clears the agent replica's selections by replacing them with an empty set.
    pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
        self.set_agent_selections(Arc::default(), false, Default::default(), cx);
    }
2476
    /// Replaces the buffer's entire text.
    ///
    /// Cancels any pending autoindent requests, since they were computed
    /// against text that is being replaced wholesale.
    pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
    where
        T: Into<Arc<str>>,
    {
        self.autoindent_requests.clear();
        self.edit([(0..self.len(), text)], None, cx)
    }
2485
    /// Appends the given text to the end of the buffer.
    pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
    where
        T: Into<Arc<str>>,
    {
        self.edit([(self.len()..self.len(), text)], None, cx)
    }
2493
    /// Applies the given edits to the buffer. Each edit is specified as a range of text to
    /// delete, and a string of text to insert at that location. Adjacent edits are coalesced.
    ///
    /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
    /// request for the edited ranges, which will be processed when the buffer finishes
    /// parsing.
    ///
    /// Parsing takes place at the end of a transaction, and may compute synchronously
    /// or asynchronously, depending on the changes.
    ///
    /// Returns the timestamp of the resulting edit operation, or `None` if
    /// every edit was empty.
    pub fn edit<I, S, T>(
        &mut self,
        edits_iter: I,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) -> Option<clock::Lamport>
    where
        I: IntoIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        self.edit_internal(edits_iter, autoindent_mode, true, cx)
    }
2516
    /// Like [`edit`](Self::edit), but does not coalesce adjacent edits.
    pub fn edit_non_coalesce<I, S, T>(
        &mut self,
        edits_iter: I,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) -> Option<clock::Lamport>
    where
        I: IntoIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        self.edit_internal(edits_iter, autoindent_mode, false, cx)
    }
2531
    /// Core implementation backing [`Self::edit`] and [`Self::edit_non_coalesce`]:
    /// normalizes and (optionally) coalesces the edits, applies them inside a
    /// transaction, and — when an autoindent mode is given and the buffer has a
    /// language — records an [`AutoindentRequest`] describing how each inserted
    /// region should be re-indented once parsing completes.
    fn edit_internal<I, S, T>(
        &mut self,
        edits_iter: I,
        autoindent_mode: Option<AutoindentMode>,
        coalesce_adjacent: bool,
        cx: &mut Context<Self>,
    ) -> Option<clock::Lamport>
    where
        I: IntoIterator<Item = (Range<S>, T)>,
        S: ToOffset,
        T: Into<Arc<str>>,
    {
        // Skip invalid edits and coalesce contiguous ones.
        let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();

        for (range, new_text) in edits_iter {
            let mut range = range.start.to_offset(self)..range.end.to_offset(self);

            // Normalize inverted ranges rather than rejecting them.
            if range.start > range.end {
                mem::swap(&mut range.start, &mut range.end);
            }
            let new_text = new_text.into();
            // Drop no-op edits (empty deletion and empty insertion).
            if !new_text.is_empty() || !range.is_empty() {
                let prev_edit = edits.last_mut();
                // In coalescing mode, touching edits merge; otherwise only
                // actually-overlapping edits do.
                let should_coalesce = prev_edit.as_ref().is_some_and(|(prev_range, _)| {
                    if coalesce_adjacent {
                        prev_range.end >= range.start
                    } else {
                        prev_range.end > range.start
                    }
                });

                if let Some((prev_range, prev_text)) = prev_edit
                    && should_coalesce
                {
                    prev_range.end = cmp::max(prev_range.end, range.end);
                    *prev_text = format!("{prev_text}{new_text}").into();
                } else {
                    edits.push((range, new_text));
                }
            }
        }
        if edits.is_empty() {
            return None;
        }

        self.start_transaction();
        // Any in-flight autoindent computation is stale once we edit again.
        self.pending_autoindent.take();
        // Autoindent only applies when the buffer has a language; capture the
        // pre-edit snapshot so suggestions can be computed against it.
        let autoindent_request = autoindent_mode
            .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));

        let edit_operation = self.text.edit(edits.iter().cloned());
        let edit_id = edit_operation.timestamp();

        if let Some((before_edit, mode)) = autoindent_request {
            // Running offset delta between pre-edit and post-edit coordinates.
            let mut delta = 0isize;
            // Caches the per-language auto-indent setting across consecutive
            // edits in the same language region.
            let mut previous_setting = None;
            let entries: Vec<_> = edits
                .into_iter()
                .enumerate()
                .zip(&edit_operation.as_edit().unwrap().new_text)
                .filter(|((_, (range, _)), _)| {
                    let language = before_edit.language_at(range.start);
                    let language_id = language.map(|l| l.id());
                    if let Some((cached_language_id, apply_syntax_indent)) = previous_setting
                        && cached_language_id == language_id
                    {
                        apply_syntax_indent
                    } else {
                        // The auto-indent setting is not present in editorconfigs, hence
                        // we can avoid passing the file here.
                        let auto_indent_mode = LanguageSettings::resolve(
                            None,
                            language.map(|l| l.name()).as_ref(),
                            cx,
                        )
                        .auto_indent;
                        let apply_syntax_indent = auto_indent_mode == AutoIndentMode::SyntaxAware;
                        previous_setting = Some((language_id, apply_syntax_indent));
                        apply_syntax_indent
                    }
                })
                .map(|((ix, (range, _)), new_text)| {
                    let new_text_length = new_text.len();
                    let old_start = range.start.to_point(&before_edit);
                    let new_start = (delta + range.start as isize) as usize;
                    let range_len = range.end - range.start;
                    delta += new_text_length as isize - range_len as isize;

                    // Decide what range of the insertion to auto-indent, and whether
                    // the first line of the insertion should be considered a newly-inserted line
                    // or an edit to an existing line.
                    let mut range_of_insertion_to_indent = 0..new_text_length;
                    let mut first_line_is_new = true;

                    let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
                    let old_line_end = before_edit.line_len(old_start.row);

                    // An edit starting past the line's indentation is editing
                    // existing content, not creating a line.
                    if old_start.column > old_line_start {
                        first_line_is_new = false;
                    }

                    if !new_text.contains('\n')
                        && (old_start.column + (range_len as u32) < old_line_end
                            || old_line_end == old_line_start)
                    {
                        first_line_is_new = false;
                    }

                    // When inserting text starting with a newline, avoid auto-indenting the
                    // previous line.
                    if new_text.starts_with('\n') {
                        range_of_insertion_to_indent.start += 1;
                        first_line_is_new = true;
                    }

                    let mut original_indent_column = None;
                    if let AutoindentMode::Block {
                        original_indent_columns,
                    } = &mode
                    {
                        original_indent_column = Some(if new_text.starts_with('\n') {
                            indent_size_for_text(
                                new_text[range_of_insertion_to_indent.clone()].chars(),
                            )
                            .len
                        } else {
                            // Prefer the caller-provided column; fall back to
                            // measuring the inserted text itself.
                            original_indent_columns
                                .get(ix)
                                .copied()
                                .flatten()
                                .unwrap_or_else(|| {
                                    indent_size_for_text(
                                        new_text[range_of_insertion_to_indent.clone()].chars(),
                                    )
                                    .len
                                })
                        });

                        // Avoid auto-indenting the line after the edit.
                        if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
                            range_of_insertion_to_indent.end -= 1;
                        }
                    }

                    AutoindentRequestEntry {
                        original_indent_column,
                        old_row: if first_line_is_new {
                            None
                        } else {
                            Some(old_start.row)
                        },
                        indent_size: before_edit.language_indent_size_at(range.start, cx),
                        range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
                            ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
                    }
                })
                .collect();

            if !entries.is_empty() {
                self.autoindent_requests.push(Arc::new(AutoindentRequest {
                    before_edit,
                    entries,
                    is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
                    ignore_empty_lines: false,
                }));
            }
        }

        self.end_transaction(cx);
        self.send_operation(Operation::Buffer(edit_operation), true, cx);
        Some(edit_id)
    }
2705
2706 fn did_edit(
2707 &mut self,
2708 old_version: &clock::Global,
2709 was_dirty: bool,
2710 is_local: bool,
2711 cx: &mut Context<Self>,
2712 ) {
2713 self.was_changed();
2714
2715 if self.edits_since::<usize>(old_version).next().is_none() {
2716 return;
2717 }
2718
2719 self.reparse(cx, true);
2720 cx.emit(BufferEvent::Edited { is_local });
2721 let is_dirty = self.is_dirty();
2722 if was_dirty != is_dirty {
2723 cx.emit(BufferEvent::DirtyChanged);
2724 }
2725 if was_dirty && !is_dirty {
2726 if let Some(file) = self.file.as_ref() {
2727 if matches!(file.disk_state(), DiskState::Present { .. })
2728 && file.disk_state().mtime() != self.saved_mtime
2729 {
2730 cx.emit(BufferEvent::ReloadNeeded);
2731 }
2732 }
2733 }
2734 cx.notify();
2735 }
2736
    /// Enqueues an autoindent request covering the given ranges of the buffer,
    /// without performing any edit. Empty lines within the ranges are ignored.
    pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
    where
        I: IntoIterator<Item = Range<T>>,
        T: ToOffset + Copy,
    {
        let before_edit = self.snapshot();
        let entries = ranges
            .into_iter()
            .map(|range| AutoindentRequestEntry {
                range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
                // `None`: treat these as edits to existing lines, not new lines.
                old_row: None,
                indent_size: before_edit.language_indent_size_at(range.start, cx),
                original_indent_column: None,
            })
            .collect();
        self.autoindent_requests.push(Arc::new(AutoindentRequest {
            before_edit,
            entries,
            is_block_mode: false,
            ignore_empty_lines: true,
        }));
        // NOTE(review): 300 microseconds is an unusually short debounce —
        // confirm `from_micros` (vs. `from_millis`) is intended here.
        self.request_autoindent(cx, Some(Duration::from_micros(300)));
    }
2760
    // Inserts newlines at the given position to create an empty line, returning the start of the new line.
    // You can also request the insertion of empty lines above and below the line starting at the returned point.
    //
    // All insertions happen within a single transaction and are auto-indented
    // line by line. Note that each edit below shifts subsequent positions, so
    // the order of these steps matters.
    pub fn insert_empty_line(
        &mut self,
        position: impl ToPoint,
        space_above: bool,
        space_below: bool,
        cx: &mut Context<Self>,
    ) -> Point {
        let mut position = position.to_point(self);

        self.start_transaction();

        // Break the current line at `position`.
        self.edit(
            [(position..position, "\n")],
            Some(AutoindentMode::EachLine),
            cx,
        );

        // If we split mid-line, the new line starts one row down.
        if position.column > 0 {
            position += Point::new(1, 0);
        }

        // Ensure the line at `position` is actually blank.
        if !self.is_line_blank(position.row) {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
        }

        // Optionally guarantee a blank line above the target line.
        if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
            position.row += 1;
        }

        // Optionally guarantee a blank line below the target line.
        if space_below
            && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
        {
            self.edit(
                [(position..position, "\n")],
                Some(AutoindentMode::EachLine),
                cx,
            );
        }

        self.end_transaction(cx);

        position
    }
2815
    /// Applies the given remote operations to the buffer.
    ///
    /// Text operations are applied by the underlying text CRDT; non-text
    /// operations (selections, diagnostics, …) are applied immediately when
    /// possible, or deferred until their dependencies arrive.
    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
        self.pending_autoindent.take();
        let was_dirty = self.is_dirty();
        let old_version = self.version.clone();
        let mut deferred_ops = Vec::new();
        // Split the incoming stream: buffer (text) ops are collected, other
        // ops are applied or deferred in place.
        let buffer_ops = ops
            .into_iter()
            .filter_map(|op| match op {
                Operation::Buffer(op) => Some(op),
                _ => {
                    if self.can_apply_op(&op) {
                        self.apply_op(op, cx);
                    } else {
                        deferred_ops.push(op);
                    }
                    None
                }
            })
            .collect::<Vec<_>>();
        // Re-broadcast remote text operations (as non-local) before applying them.
        for operation in buffer_ops.iter() {
            self.send_operation(Operation::Buffer(operation.clone()), false, cx);
        }
        self.text.apply_ops(buffer_ops);
        self.deferred_ops.insert(deferred_ops);
        // Applying text ops may have unblocked previously-deferred ops.
        self.flush_deferred_ops(cx);
        self.did_edit(&old_version, was_dirty, false, cx);
        // Notify independently of whether the buffer was edited as the operations could include a
        // selection update.
        cx.notify();
    }
2847
2848 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2849 let mut deferred_ops = Vec::new();
2850 for op in self.deferred_ops.drain().iter().cloned() {
2851 if self.can_apply_op(&op) {
2852 self.apply_op(op, cx);
2853 } else {
2854 deferred_ops.push(op);
2855 }
2856 }
2857 self.deferred_ops.insert(deferred_ops);
2858 }
2859
    /// Returns whether any operations (at this layer or in the underlying
    /// text buffer) are still waiting on missing dependencies.
    pub fn has_deferred_ops(&self) -> bool {
        !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
    }
2863
    /// Returns whether a non-text operation's dependencies (the anchors it
    /// references) can currently be resolved, i.e. whether it is safe to apply.
    fn can_apply_op(&self, operation: &Operation) -> bool {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            // Diagnostics are applicable once every referenced anchor resolves.
            Operation::UpdateDiagnostics {
                diagnostics: diagnostic_set,
                ..
            } => diagnostic_set.iter().all(|diagnostic| {
                self.text.can_resolve(&diagnostic.range.start)
                    && self.text.can_resolve(&diagnostic.range.end)
            }),
            // Likewise for every selection endpoint.
            Operation::UpdateSelections { selections, .. } => selections
                .iter()
                .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
            // These operations carry no anchors and can always be applied.
            Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
        }
    }
2882
    /// Applies a single non-text operation whose dependencies are satisfied
    /// (see [`Self::can_apply_op`]), observing its lamport timestamp so the
    /// local clock stays ahead of everything it has seen.
    fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
        match operation {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be applied at this layer")
            }
            Operation::UpdateDiagnostics {
                server_id,
                diagnostics: diagnostic_set,
                lamport_timestamp,
            } => {
                let snapshot = self.snapshot();
                self.apply_diagnostic_update(
                    server_id,
                    DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
                    lamport_timestamp,
                    cx,
                );
            }
            Operation::UpdateSelections {
                selections,
                lamport_timestamp,
                line_mode,
                cursor_shape,
            } => {
                // Ignore stale updates: keep only the newest selection set per replica.
                if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
                    && set.lamport_timestamp > lamport_timestamp
                {
                    return;
                }

                self.remote_selections.insert(
                    lamport_timestamp.replica_id,
                    SelectionSet {
                        selections,
                        lamport_timestamp,
                        line_mode,
                        cursor_shape,
                    },
                );
                self.text.lamport_clock.observe(lamport_timestamp);
                self.non_text_state_update_count += 1;
            }
            Operation::UpdateCompletionTriggers {
                triggers,
                lamport_timestamp,
                server_id,
            } => {
                if triggers.is_empty() {
                    // Empty trigger list clears the server's entry; rebuild the
                    // merged trigger set from the remaining servers.
                    self.completion_triggers_per_language_server
                        .remove(&server_id);
                    self.completion_triggers = self
                        .completion_triggers_per_language_server
                        .values()
                        .flat_map(|triggers| triggers.iter().cloned())
                        .collect();
                } else {
                    self.completion_triggers_per_language_server
                        .insert(server_id, triggers.iter().cloned().collect());
                    self.completion_triggers.extend(triggers);
                }
                self.text.lamport_clock.observe(lamport_timestamp);
            }
            Operation::UpdateLineEnding {
                line_ending,
                lamport_timestamp,
            } => {
                self.text.set_line_ending(line_ending);
                self.text.lamport_clock.observe(lamport_timestamp);
            }
        }
    }
2954
2955 fn apply_diagnostic_update(
2956 &mut self,
2957 server_id: LanguageServerId,
2958 diagnostics: DiagnosticSet,
2959 lamport_timestamp: clock::Lamport,
2960 cx: &mut Context<Self>,
2961 ) {
2962 if lamport_timestamp > self.diagnostics_timestamp {
2963 if diagnostics.is_empty() {
2964 self.diagnostics.remove(&server_id);
2965 } else {
2966 self.diagnostics.insert(server_id, diagnostics);
2967 }
2968 self.diagnostics_timestamp = lamport_timestamp;
2969 self.non_text_state_update_count += 1;
2970 self.text.lamport_clock.observe(lamport_timestamp);
2971 cx.notify();
2972 cx.emit(BufferEvent::DiagnosticsUpdated);
2973 }
2974 }
2975
2976 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
2977 self.was_changed();
2978 cx.emit(BufferEvent::Operation {
2979 operation,
2980 is_local,
2981 });
2982 }
2983
2984 /// Removes the selections for a given peer.
2985 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
2986 self.remote_selections.remove(&replica_id);
2987 cx.notify();
2988 }
2989
2990 /// Undoes the most recent transaction.
2991 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2992 let was_dirty = self.is_dirty();
2993 let old_version = self.version.clone();
2994
2995 if let Some((transaction_id, operation)) = self.text.undo() {
2996 self.send_operation(Operation::Buffer(operation), true, cx);
2997 self.did_edit(&old_version, was_dirty, true, cx);
2998 self.restore_encoding_for_transaction(transaction_id, was_dirty);
2999 Some(transaction_id)
3000 } else {
3001 None
3002 }
3003 }
3004
3005 /// Manually undoes a specific transaction in the buffer's undo history.
3006 pub fn undo_transaction(
3007 &mut self,
3008 transaction_id: TransactionId,
3009 cx: &mut Context<Self>,
3010 ) -> bool {
3011 let was_dirty = self.is_dirty();
3012 let old_version = self.version.clone();
3013 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3014 self.send_operation(Operation::Buffer(operation), true, cx);
3015 self.did_edit(&old_version, was_dirty, true, cx);
3016 true
3017 } else {
3018 false
3019 }
3020 }
3021
3022 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3023 pub fn undo_to_transaction(
3024 &mut self,
3025 transaction_id: TransactionId,
3026 cx: &mut Context<Self>,
3027 ) -> bool {
3028 let was_dirty = self.is_dirty();
3029 let old_version = self.version.clone();
3030
3031 let operations = self.text.undo_to_transaction(transaction_id);
3032 let undone = !operations.is_empty();
3033 for operation in operations {
3034 self.send_operation(Operation::Buffer(operation), true, cx);
3035 }
3036 if undone {
3037 self.did_edit(&old_version, was_dirty, true, cx)
3038 }
3039 undone
3040 }
3041
3042 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3043 let was_dirty = self.is_dirty();
3044 let operation = self.text.undo_operations(counts);
3045 let old_version = self.version.clone();
3046 self.send_operation(Operation::Buffer(operation), true, cx);
3047 self.did_edit(&old_version, was_dirty, true, cx);
3048 }
3049
3050 /// Manually redoes a specific transaction in the buffer's redo history.
3051 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3052 let was_dirty = self.is_dirty();
3053 let old_version = self.version.clone();
3054
3055 if let Some((transaction_id, operation)) = self.text.redo() {
3056 self.send_operation(Operation::Buffer(operation), true, cx);
3057 self.did_edit(&old_version, was_dirty, true, cx);
3058 self.restore_encoding_for_transaction(transaction_id, was_dirty);
3059 Some(transaction_id)
3060 } else {
3061 None
3062 }
3063 }
3064
    /// If `transaction_id` was recorded by a reload-with-encoding, restores the
    /// encoding/BOM state saved for it, and stores the current state in its
    /// place so that a subsequent undo/redo of the same transaction toggles
    /// back again.
    fn restore_encoding_for_transaction(&mut self, transaction_id: TransactionId, was_dirty: bool) {
        if let Some((old_encoding, old_has_bom)) =
            self.reload_with_encoding_txns.get(&transaction_id)
        {
            let current_encoding = self.encoding;
            let current_has_bom = self.has_bom;
            self.encoding = *old_encoding;
            self.has_bom = *old_has_bom;
            // If the buffer was clean before the undo/redo, keep it clean by
            // treating the restored version as the saved version.
            if !was_dirty {
                self.saved_version = self.version.clone();
                self.has_unsaved_edits
                    .set((self.saved_version.clone(), false));
            }
            // Swap the pre-restore state into the map to keep the mapping
            // reversible for the next undo/redo of this transaction.
            self.reload_with_encoding_txns
                .insert(transaction_id, (current_encoding, current_has_bom));
        }
    }
3082
3083 /// Manually undoes all changes until a given transaction in the buffer's redo history.
3084 pub fn redo_to_transaction(
3085 &mut self,
3086 transaction_id: TransactionId,
3087 cx: &mut Context<Self>,
3088 ) -> bool {
3089 let was_dirty = self.is_dirty();
3090 let old_version = self.version.clone();
3091
3092 let operations = self.text.redo_to_transaction(transaction_id);
3093 let redone = !operations.is_empty();
3094 for operation in operations {
3095 self.send_operation(Operation::Buffer(operation), true, cx);
3096 }
3097 if redone {
3098 self.did_edit(&old_version, was_dirty, true, cx)
3099 }
3100 redone
3101 }
3102
3103 /// Override current completion triggers with the user-provided completion triggers.
3104 pub fn set_completion_triggers(
3105 &mut self,
3106 server_id: LanguageServerId,
3107 triggers: BTreeSet<String>,
3108 cx: &mut Context<Self>,
3109 ) {
3110 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3111 if triggers.is_empty() {
3112 self.completion_triggers_per_language_server
3113 .remove(&server_id);
3114 self.completion_triggers = self
3115 .completion_triggers_per_language_server
3116 .values()
3117 .flat_map(|triggers| triggers.iter().cloned())
3118 .collect();
3119 } else {
3120 self.completion_triggers_per_language_server
3121 .insert(server_id, triggers.clone());
3122 self.completion_triggers.extend(triggers.iter().cloned());
3123 }
3124 self.send_operation(
3125 Operation::UpdateCompletionTriggers {
3126 triggers: triggers.into_iter().collect(),
3127 lamport_timestamp: self.completion_triggers_timestamp,
3128 server_id,
3129 },
3130 true,
3131 cx,
3132 );
3133 cx.notify();
3134 }
3135
    /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by LSP server which returns a list of trigger characters for completions.
    ///
    /// The returned set is the union of the triggers registered by every
    /// language server attached to this buffer.
    pub fn completion_triggers(&self) -> &BTreeSet<String> {
        &self.completion_triggers
    }
3141
    /// Call this directly after performing edits to prevent the preview tab
    /// from being dismissed by those edits. It causes `should_dismiss_preview`
    /// to return false until there are additional edits.
    pub fn refresh_preview(&mut self) {
        // Record the current version; `preserve_preview` compares against it.
        self.preview_version = self.version.clone();
    }
3148
    /// Whether we should preserve the preview status of a tab containing this buffer.
    ///
    /// True as long as no edits have occurred since the last `refresh_preview`.
    pub fn preserve_preview(&self) -> bool {
        !self.has_edits_since(&self.preview_version)
    }
3153
    /// Sets the time window within which successive edits are grouped into a
    /// single undo transaction.
    pub fn set_group_interval(&mut self, group_interval: Duration) {
        self.text.set_group_interval(group_interval);
    }
3157}
3158
#[doc(hidden)]
#[cfg(any(test, feature = "test-support"))]
impl Buffer {
    /// Applies the edits encoded in `marked_string` (test helper).
    pub fn edit_via_marked_text(
        &mut self,
        marked_string: &str,
        autoindent_mode: Option<AutoindentMode>,
        cx: &mut Context<Self>,
    ) {
        let marked_edits = self.edits_for_marked_text(marked_string);
        self.edit(marked_edits, autoindent_mode, cx);
    }

    /// Applies up to `old_range_count` random, non-overlapping edits to the
    /// buffer, for randomized collaboration tests.
    pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
    where
        T: rand::Rng,
    {
        let mut planned_edits: Vec<(Range<usize>, String)> = Vec::new();
        let mut prev_end: Option<usize> = None;
        for _ in 0..old_range_count {
            if prev_end.is_some_and(|end| end >= self.len()) {
                break;
            }

            let search_start = prev_end.map_or(0, |end| end + 1);
            let mut range = self.random_byte_range(search_start, rng);
            // Occasionally produce a reversed range to exercise normalization.
            if rng.random_bool(0.2) {
                mem::swap(&mut range.start, &mut range.end);
            }
            prev_end = Some(range.end);

            let replacement_len = rng.random_range(0..10);
            let replacement: String = RandomCharIter::new(&mut *rng)
                .take(replacement_len)
                .collect::<String>()
                .to_uppercase();

            planned_edits.push((range, replacement));
        }
        log::info!(
            "mutating buffer {:?} with {:?}",
            self.replica_id(),
            planned_edits
        );
        self.edit(planned_edits, None, cx);
    }

    /// Performs a random sequence of undo/redo operations, for randomized
    /// collaboration tests.
    pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
        let dirty_before = self.is_dirty();
        let version_before = self.version.clone();

        for op in self.text.randomly_undo_redo(rng) {
            self.send_operation(Operation::Buffer(op), true, cx);
            self.did_edit(&version_before, dirty_before, true, cx);
        }
    }
}
3213
// `Buffer` notifies its observers of edits, saves, diagnostics, etc. via `BufferEvent`s.
impl EventEmitter<BufferEvent> for Buffer {}
3215
3216fn offset_in_sub_ranges(
3217 sub_ranges: &[Range<Anchor>],
3218 offset: usize,
3219 snapshot: &TextBufferSnapshot,
3220) -> bool {
3221 let start_anchor = snapshot.anchor_before(offset);
3222 let end_anchor = snapshot.anchor_after(offset);
3223
3224 sub_ranges.iter().any(|sub_range| {
3225 let is_before_start = sub_range.end.cmp(&start_anchor, snapshot).is_lt();
3226 let is_after_end = sub_range.start.cmp(&end_anchor, snapshot).is_gt();
3227 !is_before_start && !is_after_end
3228 })
3229}
3230
// A `Buffer` transparently exposes the read-only API of its underlying
// `TextBuffer`.
impl Deref for Buffer {
    type Target = TextBuffer;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
3238
3239impl BufferSnapshot {
    /// Returns [`IndentSize`] for a given line that respects user settings and
    /// language preferences.
    pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
        // Delegates to the free function operating on the snapshot.
        indent_size_for_line(self, row)
    }
3245
3246 /// Returns [`IndentSize`] for a given position that respects user settings
3247 /// and language preferences.
3248 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3249 let settings = self.settings_at(position, cx);
3250 if settings.hard_tabs {
3251 IndentSize::tab()
3252 } else {
3253 IndentSize::spaces(settings.tab_size.get())
3254 }
3255 }
3256
3257 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3258 /// is passed in as `single_indent_size`.
3259 pub fn suggested_indents(
3260 &self,
3261 rows: impl Iterator<Item = u32>,
3262 single_indent_size: IndentSize,
3263 ) -> BTreeMap<u32, IndentSize> {
3264 let mut result = BTreeMap::new();
3265
3266 for row_range in contiguous_ranges(rows, 10) {
3267 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3268 Some(suggestions) => suggestions,
3269 _ => break,
3270 };
3271
3272 for (row, suggestion) in row_range.zip(suggestions) {
3273 let indent_size = if let Some(suggestion) = suggestion {
3274 result
3275 .get(&suggestion.basis_row)
3276 .copied()
3277 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3278 .with_delta(suggestion.delta, single_indent_size)
3279 } else {
3280 self.indent_size_for_line(row)
3281 };
3282
3283 result.insert(row, indent_size);
3284 }
3285 }
3286
3287 result
3288 }
3289
    /// Computes an indentation suggestion for each row in `row_range` by
    /// combining tree-sitter indent queries, syntax-error ranges, and
    /// regex-based increase/decrease indent patterns.
    ///
    /// Returns `None` when the buffer has no language configured.
    fn suggest_autoindents(
        &self,
        row_range: Range<u32>,
    ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
        let config = &self.language.as_ref()?.config;
        let prev_non_blank_row = self.prev_non_blank_row(row_range.start);

        // Location of a suffixed `@start.<suffix>` capture; decrease-indent
        // rules use these to find the row they should outdent back to.
        #[derive(Debug, Clone)]
        struct StartPosition {
            start: Point,
            suffix: SharedString,
            language: Arc<Language>,
        }

        // Find the suggested indentation ranges based on the syntax tree.
        let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
        let end = Point::new(row_range.end, 0);
        let range = (start..end).to_offset(&self.text);
        let mut matches = self.syntax.matches_with_options(
            range.clone(),
            &self.text,
            TreeSitterOptions {
                max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
                max_start_depth: None,
            },
            |grammar| Some(&grammar.indents_config.as_ref()?.query),
        );
        let indent_configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.indents_config.as_ref().unwrap())
            .collect::<Vec<_>>();

        let mut indent_ranges = Vec::<Range<Point>>::new();
        let mut start_positions = Vec::<StartPosition>::new();
        let mut outdent_positions = Vec::<Point>::new();
        while let Some(mat) = matches.peek() {
            let mut start: Option<Point> = None;
            let mut end: Option<Point> = None;

            let config = indent_configs[mat.grammar_index];
            for capture in mat.captures {
                if capture.index == config.indent_capture_ix {
                    // `@indent`: the whole node spans an indented region.
                    start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
                    end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.start_capture_ix {
                    // `@start`: the indented region begins after this node.
                    start = Some(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.end_capture_ix {
                    // `@end`: the indented region stops before this node.
                    end = Some(Point::from_ts_point(capture.node.start_position()));
                } else if Some(capture.index) == config.outdent_capture_ix {
                    outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
                } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
                    start_positions.push(StartPosition {
                        start: Point::from_ts_point(capture.node.start_position()),
                        suffix: suffix.clone(),
                        language: mat.language.clone(),
                    });
                }
            }

            matches.advance();
            if let Some((start, end)) = start.zip(end) {
                // Single-line constructs do not affect indentation.
                if start.row == end.row {
                    continue;
                }
                // Keep `indent_ranges` sorted by start point, merging ranges
                // that begin at the same point.
                let range = start..end;
                match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
                    Err(ix) => indent_ranges.insert(ix, range),
                    Ok(ix) => {
                        let prev_range = &mut indent_ranges[ix];
                        prev_range.end = prev_range.end.max(range.end);
                    }
                }
            }
        }

        // Collect syntax-error ranges; suggestions inside them are flagged so
        // callers can treat them with less confidence.
        let mut error_ranges = Vec::<Range<Point>>::new();
        let mut matches = self
            .syntax
            .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
        while let Some(mat) = matches.peek() {
            let node = mat.captures[0].node;
            let start = Point::from_ts_point(node.start_position());
            let end = Point::from_ts_point(node.end_position());
            let range = start..end;
            let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
                Ok(ix) | Err(ix) => ix,
            };
            // Coalesce any existing ranges that this one subsumes.
            let mut end_ix = ix;
            while let Some(existing_range) = error_ranges.get(end_ix) {
                if existing_range.end < end {
                    end_ix += 1;
                } else {
                    break;
                }
            }
            error_ranges.splice(ix..end_ix, [range]);
            matches.advance();
        }

        outdent_positions.sort();
        for outdent_position in outdent_positions {
            // find the innermost indent range containing this outdent_position
            // set its end to the outdent position
            if let Some(range_to_truncate) = indent_ranges
                .iter_mut()
                .rfind(|indent_range| indent_range.contains(&outdent_position))
            {
                range_to_truncate.end = outdent_position;
            }
        }

        start_positions.sort_by_key(|b| b.start);

        // Find the suggested indentation increases and decreased based on regexes.
        let mut regex_outdent_map = HashMap::default();
        let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
        let mut start_positions_iter = start_positions.iter().peekable();

        let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
        self.for_each_line(
            Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
                ..Point::new(row_range.end, 0),
            |row, line| {
                let indent_len = self.indent_size_for_line(row).len;
                let row_language = self.language_at(Point::new(row, indent_len)).cloned();
                let row_language_config = row_language
                    .as_ref()
                    .map(|lang| lang.config())
                    .unwrap_or(config);

                // A matching decrease pattern outdents this row...
                if row_language_config
                    .decrease_indent_pattern
                    .as_ref()
                    .is_some_and(|regex| regex.is_match(line))
                {
                    indent_change_rows.push((row, Ordering::Less));
                }
                // ...while a matching increase pattern indents the next row.
                if row_language_config
                    .increase_indent_pattern
                    .as_ref()
                    .is_some_and(|regex| regex.is_match(line))
                {
                    indent_change_rows.push((row + 1, Ordering::Greater));
                }
                // Record suffixed start positions from earlier rows, keyed by
                // suffix, so decrease rules can consult their `valid_after`.
                while let Some(pos) = start_positions_iter.peek() {
                    if pos.start.row < row {
                        let pos = start_positions_iter.next().unwrap().clone();
                        last_seen_suffix
                            .entry(pos.suffix.to_string())
                            .or_default()
                            .push(pos);
                    } else {
                        break;
                    }
                }
                for rule in &row_language_config.decrease_indent_patterns {
                    if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
                        let row_start_column = self.indent_size_for_line(row).len;
                        // Outdent to the most recent valid start position in
                        // the same language, at or left of this row's indent.
                        let basis_row = rule
                            .valid_after
                            .iter()
                            .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
                            .flatten()
                            .filter(|pos| {
                                row_language
                                    .as_ref()
                                    .or(self.language.as_ref())
                                    .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
                            })
                            .filter(|pos| pos.start.column <= row_start_column)
                            .max_by_key(|pos| pos.start.row);
                        if let Some(outdent_to) = basis_row {
                            regex_outdent_map.insert(row, outdent_to.start.row);
                        }
                        break;
                    }
                }
            },
        );

        let mut indent_changes = indent_change_rows.into_iter().peekable();
        let mut prev_row = if config.auto_indent_using_last_non_empty_line {
            prev_non_blank_row.unwrap_or(0)
        } else {
            row_range.start.saturating_sub(1)
        };

        let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
        Some(row_range.map(move |row| {
            let row_start = Point::new(row, self.indent_size_for_line(row).len);

            let mut indent_from_prev_row = false;
            let mut outdent_from_prev_row = false;
            let mut outdent_to_row = u32::MAX;
            let mut from_regex = false;

            // Consume regex-driven indent changes up to and including `row`.
            while let Some((indent_row, delta)) = indent_changes.peek() {
                match indent_row.cmp(&row) {
                    Ordering::Equal => match delta {
                        Ordering::Less => {
                            from_regex = true;
                            outdent_from_prev_row = true
                        }
                        Ordering::Greater => {
                            indent_from_prev_row = true;
                            from_regex = true
                        }
                        _ => {}
                    },

                    Ordering::Greater => break,
                    Ordering::Less => {}
                }

                indent_changes.next();
            }

            // Apply tree-sitter indent ranges: indent when a range opens on the
            // previous row; outdent when a range closes between the rows.
            for range in &indent_ranges {
                if range.start.row >= row {
                    break;
                }
                if range.start.row == prev_row && range.end > row_start {
                    indent_from_prev_row = true;
                }
                if range.end > prev_row_start && range.end <= row_start {
                    outdent_to_row = outdent_to_row.min(range.start.row);
                }
            }

            // A regex-based outdent overrides the syntax-derived suggestion.
            if let Some(basis_row) = regex_outdent_map.get(&row) {
                indent_from_prev_row = false;
                outdent_to_row = *basis_row;
                from_regex = true;
            }

            let within_error = error_ranges
                .iter()
                .any(|e| e.start.row < row && e.end > row_start);

            let suggestion = if outdent_to_row == prev_row
                || (outdent_from_prev_row && indent_from_prev_row)
            {
                // Opposing signals (or outdenting to the previous row) cancel
                // out: keep the previous row's indentation.
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else if indent_from_prev_row {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Greater,
                    within_error: within_error && !from_regex,
                })
            } else if outdent_to_row < prev_row {
                Some(IndentSuggestion {
                    basis_row: outdent_to_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else if outdent_from_prev_row {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Less,
                    within_error: within_error && !from_regex,
                })
            } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
            {
                Some(IndentSuggestion {
                    basis_row: prev_row,
                    delta: Ordering::Equal,
                    within_error: within_error && !from_regex,
                })
            } else {
                None
            };

            prev_row = row;
            prev_row_start = row_start;
            suggestion
        }))
    }
3572
3573 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3574 while row > 0 {
3575 row -= 1;
3576 if !self.is_line_blank(row) {
3577 return Some(row);
3578 }
3579 }
3580 None
3581 }
3582
    /// Returns the captures produced by running `query` over every syntax
    /// layer intersecting `range`.
    pub fn captures(
        &self,
        range: Range<usize>,
        query: fn(&Grammar) -> Option<&tree_sitter::Query>,
    ) -> SyntaxMapCaptures<'_> {
        self.syntax.captures(range, &self.text, query)
    }
3590
3591 #[ztracing::instrument(skip_all)]
3592 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3593 let captures = self.syntax.captures(range, &self.text, |grammar| {
3594 grammar
3595 .highlights_config
3596 .as_ref()
3597 .map(|config| &config.query)
3598 });
3599 let highlight_maps = captures
3600 .grammars()
3601 .iter()
3602 .map(|grammar| grammar.highlight_map())
3603 .collect();
3604 (captures, highlight_maps)
3605 }
3606
3607 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3608 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3609 /// returned in chunks where each chunk has a single syntax highlighting style and
3610 /// diagnostic status.
3611 #[ztracing::instrument(skip_all)]
3612 pub fn chunks<T: ToOffset>(
3613 &self,
3614 range: Range<T>,
3615 language_aware: LanguageAwareStyling,
3616 ) -> BufferChunks<'_> {
3617 let range = range.start.to_offset(self)..range.end.to_offset(self);
3618
3619 let mut syntax = None;
3620 if language_aware.tree_sitter {
3621 syntax = Some(self.get_highlights(range.clone()));
3622 }
3623 BufferChunks::new(
3624 self.text.as_rope(),
3625 range,
3626 syntax,
3627 language_aware.diagnostics,
3628 Some(self),
3629 )
3630 }
3631
3632 pub fn highlighted_text_for_range<T: ToOffset>(
3633 &self,
3634 range: Range<T>,
3635 override_style: Option<HighlightStyle>,
3636 syntax_theme: &SyntaxTheme,
3637 ) -> HighlightedText {
3638 HighlightedText::from_buffer_range(
3639 range,
3640 &self.text,
3641 &self.syntax,
3642 override_style,
3643 syntax_theme,
3644 )
3645 }
3646
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses callback to avoid allocating a string for each line.
    ///
    /// The callback receives each row number and that line's text without its
    /// trailing newline.
    fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
        let mut line = String::new();
        let mut row = range.start.row;
        // The trailing "\n" sentinel chunk ensures the final accumulated line
        // is flushed to the callback.
        for chunk in self
            .as_rope()
            .chunks_in_range(range.to_offset(self))
            .chain(["\n"])
        {
            // Each '\n' inside a chunk completes the line accumulated so far;
            // the segment before the first '\n' continues the current line.
            for (newline_ix, text) in chunk.split('\n').enumerate() {
                if newline_ix > 0 {
                    callback(row, &line);
                    row += 1;
                    line.clear();
                }
                line.push_str(text);
            }
        }
    }
3667
    /// Iterates over every [`SyntaxLayer`] in the buffer, including hidden
    /// layers.
    pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
        self.syntax_layers_for_range(0..self.len(), true)
    }
3672
3673 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3674 let offset = position.to_offset(self);
3675 self.syntax_layers_for_range(offset..offset, false)
3676 .filter(|l| {
3677 if let Some(ranges) = l.included_sub_ranges {
3678 ranges.iter().any(|range| {
3679 let start = range.start.to_offset(self);
3680 start <= offset && {
3681 let end = range.end.to_offset(self);
3682 offset < end
3683 }
3684 })
3685 } else {
3686 l.node().start_byte() <= offset && l.node().end_byte() > offset
3687 }
3688 })
3689 .last()
3690 }
3691
    /// Returns the [`SyntaxLayer`]s intersecting `range`, optionally including
    /// hidden layers.
    pub fn syntax_layers_for_range<D: ToOffset>(
        &self,
        range: Range<D>,
        include_hidden: bool,
    ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
        self.syntax
            .layers_for_range(range, &self.text, include_hidden)
    }
3700
    /// Returns the languages of the buffer's syntax layers, including hidden
    /// layers.
    pub fn syntax_layers_languages(&self) -> impl Iterator<Item = &Arc<Language>> {
        self.syntax.languages(&self, true)
    }
3704
3705 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3706 &self,
3707 range: Range<D>,
3708 ) -> Option<SyntaxLayer<'_>> {
3709 let range = range.to_offset(self);
3710 self.syntax
3711 .layers_for_range(range, &self.text, false)
3712 .max_by(|a, b| {
3713 if a.depth != b.depth {
3714 a.depth.cmp(&b.depth)
3715 } else if a.offset.0 != b.offset.0 {
3716 a.offset.0.cmp(&b.offset.0)
3717 } else {
3718 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3719 }
3720 })
3721 }
3722
    /// Returns the buffer's [`ModelineSettings`], if any were detected.
    pub fn modeline(&self) -> Option<&Arc<ModelineSettings>> {
        self.modeline.as_ref()
    }
3727
    /// Returns the buffer's primary [`Language`], ignoring nested syntax
    /// layers (see [`Self::language_at`] for position-sensitive lookup).
    pub fn language(&self) -> Option<&Arc<Language>> {
        self.language.as_ref()
    }
3732
3733 /// Returns the [`Language`] at the given location.
3734 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3735 self.syntax_layer_at(position)
3736 .map(|info| info.language)
3737 .or(self.language.as_ref())
3738 }
3739
    /// Returns the settings for the language at the given location.
    pub fn settings_at<'a, D: ToOffset>(
        &'a self,
        position: D,
        cx: &'a App,
    ) -> Cow<'a, LanguageSettings> {
        LanguageSettings::for_buffer_snapshot(self, Some(position.to_offset(self)), cx)
    }
3748
    /// Returns a [`CharClassifier`] configured for the language scope at
    /// `point`.
    pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
        CharClassifier::new(self.language_scope_at(point))
    }
3752
    /// Returns the [`LanguageScope`] at the given location.
    ///
    /// Selects the syntax layer whose smallest node containing the position is
    /// smallest, preferring deeper layers on ties, and falls back to the
    /// buffer's primary language with no override.
    pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
        let offset = position.to_offset(self);
        let mut scope = None;
        let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
        let text: &TextBufferSnapshot = self;

        // Use the layer that has the smallest node intersecting the given point.
        for layer in self
            .syntax
            .layers_for_range(offset..offset, &self.text, false)
        {
            // Skip layers whose included sub-ranges don't cover the offset.
            if let Some(ranges) = layer.included_sub_ranges
                && !offset_in_sub_ranges(ranges, offset, text)
            {
                continue;
            }

            let mut cursor = layer.node().walk();

            // Descend to the smallest node in this layer containing `offset`.
            let mut range = None;
            loop {
                let child_range = cursor.node().byte_range();
                if !child_range.contains(&offset) {
                    break;
                }

                range = Some(child_range);
                if cursor.goto_first_child_for_byte(offset).is_none() {
                    break;
                }
            }

            // Prefer deeper layers; within the same depth, prefer the layer
            // whose containing node is smaller.
            if let Some(range) = range
                && smallest_range_and_depth.as_ref().is_none_or(
                    |(smallest_range, smallest_range_depth)| {
                        if layer.depth > *smallest_range_depth {
                            true
                        } else if layer.depth == *smallest_range_depth {
                            range.len() < smallest_range.len()
                        } else {
                            false
                        }
                    },
                )
            {
                smallest_range_and_depth = Some((range, layer.depth));
                scope = Some(LanguageScope {
                    language: layer.language.clone(),
                    override_id: layer.override_id(offset, &self.text),
                });
            }
        }

        // No layer matched: fall back to the buffer's primary language.
        scope.or_else(|| {
            self.language.clone().map(|language| LanguageScope {
                language,
                override_id: None,
            })
        })
    }
3814
3815 /// Returns a tuple of the range and character kind of the word
3816 /// surrounding the given position.
3817 pub fn surrounding_word<T: ToOffset>(
3818 &self,
3819 start: T,
3820 scope_context: Option<CharScopeContext>,
3821 ) -> (Range<usize>, Option<CharKind>) {
3822 let mut start = start.to_offset(self);
3823 let mut end = start;
3824 let mut next_chars = self.chars_at(start).take(128).peekable();
3825 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3826
3827 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3828 let word_kind = cmp::max(
3829 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3830 next_chars.peek().copied().map(|c| classifier.kind(c)),
3831 );
3832
3833 for ch in prev_chars {
3834 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3835 start -= ch.len_utf8();
3836 } else {
3837 break;
3838 }
3839 }
3840
3841 for ch in next_chars {
3842 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3843 end += ch.len_utf8();
3844 } else {
3845 break;
3846 }
3847 }
3848
3849 (start..end, word_kind)
3850 }
3851
    /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
    /// range. When `require_larger` is true, the node found must be larger than the query range.
    ///
    /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
    /// be moved to the root of the tree.
    fn goto_node_enclosing_range(
        cursor: &mut tree_sitter::TreeCursor,
        query_range: &Range<usize>,
        require_larger: bool,
    ) -> bool {
        let mut ascending = false;
        loop {
            let mut range = cursor.node().byte_range();
            if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to the
                // previous sibling to find the containing node.
                if range.start > query_range.start {
                    cursor.goto_previous_sibling();
                    range = cursor.node().byte_range();
                }
            } else {
                // When the query range is non-empty and the current node ends exactly at the start,
                // move to the next sibling to find a node that extends beyond the start.
                if range.end == query_range.start {
                    cursor.goto_next_sibling();
                    range = cursor.node().byte_range();
                }
            }

            let encloses = range.contains_inclusive(query_range)
                && (!require_larger || range.len() > query_range.len());
            if !encloses {
                // The current node doesn't qualify: climb toward the root.
                // Once we start ascending, the first qualifying ancestor wins.
                ascending = true;
                if !cursor.goto_parent() {
                    return false;
                }
                continue;
            } else if ascending {
                return true;
            }

            // Descend into the current node.
            if cursor
                .goto_first_child_for_byte(query_range.start)
                .is_none()
            {
                return true;
            }
        }
    }
3902
    /// Returns the smallest syntax node that encloses `range` and is strictly
    /// larger than it, considering every syntax layer. When multiple layers
    /// yield a candidate, the smallest candidate wins.
    pub fn syntax_ancestor<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;
        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that both contains the range and is larger than it.
            if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
                continue;
            }

            let left_node = cursor.node();
            let mut layer_result = left_node;

            // For an empty range, try to find another node immediately to the right of the range.
            if left_node.end_byte() == range.start {
                let mut right_node = None;
                // Step to the next node in document order, climbing out of
                // exhausted subtrees as needed.
                while !cursor.goto_next_sibling() {
                    if !cursor.goto_parent() {
                        break;
                    }
                }

                // Descend to the smallest node starting exactly at the range.
                while cursor.node().start_byte() == range.start {
                    right_node = Some(cursor.node());
                    if !cursor.goto_first_child() {
                        break;
                    }
                }

                // If there is a candidate node on both sides of the (empty) range, then
                // decide between the two by favoring a named node over an anonymous token.
                // If both nodes are the same in that regard, favor the right one.
                if let Some(right_node) = right_node
                    && (right_node.is_named() || !left_node.is_named())
                {
                    layer_result = right_node;
                }
            }

            // Keep the smallest result across layers.
            if let Some(previous_result) = &result
                && previous_result.byte_range().len() < layer_result.byte_range().len()
            {
                continue;
            }
            result = Some(layer_result);
        }

        result
    }
3959
    /// Find the previous sibling syntax node at the given range.
    ///
    /// This function locates the syntax node that precedes the node containing
    /// the given range. It searches hierarchically by:
    /// 1. Finding the node that contains the given range
    /// 2. Looking for the previous sibling at the same tree level
    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
    ///
    /// Returns `None` if there is no previous sibling at any ancestor level.
    pub fn syntax_prev_sibling<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;

        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that contains the range
            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
                continue;
            }

            // Look for the previous sibling, moving up ancestor levels if needed
            loop {
                if cursor.goto_previous_sibling() {
                    let layer_result = cursor.node();

                    if let Some(previous_result) = &result {
                        // A result from an earlier layer ends before this
                        // candidate: keep scanning further-left siblings in
                        // this layer instead of replacing it.
                        if previous_result.byte_range().end < layer_result.byte_range().end {
                            continue;
                        }
                    }
                    result = Some(layer_result);
                    break;
                }

                // No sibling found at this level, try moving up to parent
                if !cursor.goto_parent() {
                    break;
                }
            }
        }

        result
    }
4010
    /// Find the next sibling syntax node at the given range.
    ///
    /// This function locates the syntax node that follows the node containing
    /// the given range. It searches hierarchically by:
    /// 1. Finding the node that contains the given range
    /// 2. Looking for the next sibling at the same tree level
    /// 3. If no sibling is found, moving up to parent levels and searching for siblings
    ///
    /// Returns `None` if there is no next sibling at any ancestor level.
    pub fn syntax_next_sibling<'a, T: ToOffset>(
        &'a self,
        range: Range<T>,
    ) -> Option<tree_sitter::Node<'a>> {
        let range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut result: Option<tree_sitter::Node<'a>> = None;

        for layer in self
            .syntax
            .layers_for_range(range.clone(), &self.text, true)
        {
            let mut cursor = layer.node().walk();

            // Find the node that contains the range
            if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
                continue;
            }

            // Look for the next sibling, moving up ancestor levels if needed
            loop {
                if cursor.goto_next_sibling() {
                    let layer_result = cursor.node();

                    // Across layers, keep the candidate whose byte range starts
                    // latest. NOTE(review): this `continue` restarts the loop
                    // and steps to an even later sibling rather than merely
                    // skipping this layer's candidate — confirm intended.
                    if let Some(previous_result) = &result {
                        if previous_result.byte_range().start > layer_result.byte_range().start {
                            continue;
                        }
                    }
                    result = Some(layer_result);
                    break;
                }

                // No sibling found at this level, try moving up to parent
                if !cursor.goto_parent() {
                    break;
                }
            }
        }

        result
    }
4061
    /// Returns the root syntax node within the given row
    pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
        let start_offset = position.to_offset(self);

        let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;

        // Only the first (outermost) syntax layer at this position is used.
        let layer = self
            .syntax
            .layers_for_range(start_offset..start_offset, &self.text, true)
            .next()?;

        let mut cursor = layer.node().walk();

        // Descend to the first leaf that touches the start of the range.
        while cursor.goto_first_child_for_byte(start_offset).is_some() {
            // If the child ends exactly at the position, prefer the node that
            // starts there (return value deliberately ignored: there may be
            // no next sibling).
            if cursor.node().end_byte() == start_offset {
                cursor.goto_next_sibling();
            }
        }

        // Ascend to the root node within the same row.
        // NOTE(review): the loop breaks *after* moving to the out-of-row
        // parent, so the returned node may start on an earlier row — confirm
        // that is intended.
        while cursor.goto_parent() {
            if cursor.node().start_position().row != row {
                break;
            }
        }

        Some(cursor.node())
    }
4091
4092 /// Returns the outline for the buffer.
4093 ///
4094 /// This method allows passing an optional [`SyntaxTheme`] to
4095 /// syntax-highlight the returned symbols.
4096 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4097 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4098 }
4099
4100 /// Returns all the symbols that contain the given position.
4101 ///
4102 /// This method allows passing an optional [`SyntaxTheme`] to
4103 /// syntax-highlight the returned symbols.
4104 pub fn symbols_containing<T: ToOffset>(
4105 &self,
4106 position: T,
4107 theme: Option<&SyntaxTheme>,
4108 ) -> Vec<OutlineItem<Anchor>> {
4109 let position = position.to_offset(self);
4110 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4111 let end = self.clip_offset(position + 1, Bias::Right);
4112 let mut items = self.outline_items_containing(start..end, false, theme);
4113 let mut prev_depth = None;
4114 items.retain(|item| {
4115 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4116 prev_depth = Some(item.depth);
4117 result
4118 });
4119 items
4120 }
4121
    /// Returns the point range of the first outline item (e.g. a function or
    /// type definition) whose node intersects `range`, if any.
    pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
        let range = range.to_offset(self);
        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
            grammar.outline_config.as_ref().map(|c| &c.query)
        });
        // Unwrap is safe: only grammars with an outline config yield matches here.
        let configs = matches
            .grammars()
            .iter()
            .map(|g| g.outline_config.as_ref().unwrap())
            .collect::<Vec<_>>();

        while let Some(mat) = matches.peek() {
            let config = &configs[mat.grammar_index];
            let containing_item_node = maybe!({
                // Locate the capture marking the whole outline item.
                let item_node = mat.captures.iter().find_map(|cap| {
                    if cap.index == config.item_capture_ix {
                        Some(cap.node)
                    } else {
                        None
                    }
                })?;

                // Reject items entirely before or entirely after the range.
                let item_byte_range = item_node.byte_range();
                if item_byte_range.end < range.start || item_byte_range.start > range.end {
                    None
                } else {
                    Some(item_node)
                }
            });

            if let Some(item_node) = containing_item_node {
                return Some(
                    Point::from_ts_point(item_node.start_position())
                        ..Point::from_ts_point(item_node.end_position()),
                );
            }

            matches.advance();
        }
        None
    }
4163
4164 pub fn outline_items_containing<T: ToOffset>(
4165 &self,
4166 range: Range<T>,
4167 include_extra_context: bool,
4168 theme: Option<&SyntaxTheme>,
4169 ) -> Vec<OutlineItem<Anchor>> {
4170 self.outline_items_containing_internal(
4171 range,
4172 include_extra_context,
4173 theme,
4174 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4175 )
4176 }
4177
4178 pub fn outline_items_as_points_containing<T: ToOffset>(
4179 &self,
4180 range: Range<T>,
4181 include_extra_context: bool,
4182 theme: Option<&SyntaxTheme>,
4183 ) -> Vec<OutlineItem<Point>> {
4184 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4185 range
4186 })
4187 }
4188
4189 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4190 &self,
4191 range: Range<T>,
4192 include_extra_context: bool,
4193 theme: Option<&SyntaxTheme>,
4194 ) -> Vec<OutlineItem<usize>> {
4195 self.outline_items_containing_internal(
4196 range,
4197 include_extra_context,
4198 theme,
4199 |buffer, range| range.to_offset(buffer),
4200 )
4201 }
4202
    /// Shared implementation behind the `outline_items_*` methods.
    ///
    /// Collects outline items (and their annotation comments, e.g. doc
    /// comments) intersecting `range`, assigns each item a depth based on
    /// containment, and converts the item's point ranges into the caller's
    /// coordinate type via `range_callback`.
    fn outline_items_containing_internal<T: ToOffset, U>(
        &self,
        range: Range<T>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
        range_callback: fn(&Self, Range<Point>) -> Range<U>,
    ) -> Vec<OutlineItem<U>> {
        let range = range.to_offset(self);
        let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
            grammar.outline_config.as_ref().map(|c| &c.query)
        });

        let mut items = Vec::new();
        // Row ranges covered by annotation captures, merged when adjacent or
        // overlapping.
        let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
        while let Some(mat) = matches.peek() {
            let config = matches.grammars()[mat.grammar_index]
                .outline_config
                .as_ref()
                .unwrap();
            if let Some(item) =
                self.next_outline_item(config, &mat, &range, include_extra_context, theme)
            {
                items.push(item);
            } else if let Some(capture) = mat
                .captures
                .iter()
                .find(|capture| Some(capture.index) == config.annotation_capture_ix)
            {
                let capture_range = capture.node.start_position()..capture.node.end_position();
                let mut capture_row_range =
                    capture_range.start.row as u32..capture_range.end.row as u32;
                // A capture ending at column 0 doesn't actually occupy its last row.
                if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
                {
                    capture_row_range.end -= 1;
                }
                // Merge with the previous annotation when the rows touch or
                // overlap; otherwise start a new annotation range.
                if let Some(last_row_range) = annotation_row_ranges.last_mut() {
                    if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
                        last_row_range.end = capture_row_range.end;
                    } else {
                        annotation_row_ranges.push(capture_row_range);
                    }
                } else {
                    annotation_row_ranges.push(capture_row_range);
                }
            }
            matches.advance();
        }

        // Order items so that an enclosing item sorts before everything it contains.
        items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));

        // Assign depths based on containment relationships and convert to anchors.
        let mut item_ends_stack = Vec::<Point>::new();
        let mut anchor_items = Vec::new();
        let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
        for item in items {
            // Pop ancestors that no longer contain this item.
            while let Some(last_end) = item_ends_stack.last().copied() {
                if last_end < item.range.end {
                    item_ends_stack.pop();
                } else {
                    break;
                }
            }

            // An annotation belongs to this item when it ends on the row
            // immediately preceding the item's first row.
            let mut annotation_row_range = None;
            while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
                let row_preceding_item = item.range.start.row.saturating_sub(1);
                if next_annotation_row_range.end < row_preceding_item {
                    annotation_row_ranges.next();
                } else {
                    if next_annotation_row_range.end == row_preceding_item {
                        annotation_row_range = Some(next_annotation_row_range.clone());
                        annotation_row_ranges.next();
                    }
                    break;
                }
            }

            anchor_items.push(OutlineItem {
                depth: item_ends_stack.len(),
                range: range_callback(self, item.range.clone()),
                source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
                text: item.text,
                highlight_ranges: item.highlight_ranges,
                name_ranges: item.name_ranges,
                body_range: item.body_range.map(|r| range_callback(self, r)),
                annotation_range: annotation_row_range.map(|annotation_range| {
                    let point_range = Point::new(annotation_range.start, 0)
                        ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
                    range_callback(self, point_range)
                }),
            });
            item_ends_stack.push(item.range.end);
        }

        anchor_items
    }
4299
    /// Builds a single [`OutlineItem`] from an outline query match.
    ///
    /// Returns `None` when the match has no item capture, when the item lies
    /// entirely outside `range`, or when no name/context text was captured.
    /// The returned item's `depth` is a placeholder filled in later by
    /// `outline_items_containing_internal`.
    fn next_outline_item(
        &self,
        config: &OutlineConfig,
        mat: &SyntaxMapMatch,
        range: &Range<usize>,
        include_extra_context: bool,
        theme: Option<&SyntaxTheme>,
    ) -> Option<OutlineItem<Point>> {
        let item_node = mat.captures.iter().find_map(|cap| {
            if cap.index == config.item_capture_ix {
                Some(cap.node)
            } else {
                None
            }
        })?;

        // Skip items that don't intersect the requested range.
        let item_byte_range = item_node.byte_range();
        if item_byte_range.end < range.start || item_byte_range.start > range.end {
            return None;
        }
        let item_point_range = Point::from_ts_point(item_node.start_position())
            ..Point::from_ts_point(item_node.end_position());

        let mut open_point = None;
        let mut close_point = None;

        // (byte range, is-name) pairs, concatenated below into the label text.
        let mut buffer_ranges = Vec::new();
        let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
            let mut range = node.start_byte()..node.end_byte();
            let start = node.start_position();
            // Truncate multi-line captures to their first line.
            if node.end_position().row > start.row {
                range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
            }

            if !range.is_empty() {
                buffer_ranges.push((range, node_is_name));
            }
        };

        for capture in mat.captures {
            if capture.index == config.name_capture_ix {
                add_to_buffer_ranges(capture.node, true);
            } else if Some(capture.index) == config.context_capture_ix
                || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
            {
                add_to_buffer_ranges(capture.node, false);
            } else {
                // Open/close captures delimit the item's body range.
                if Some(capture.index) == config.open_capture_ix {
                    open_point = Some(Point::from_ts_point(capture.node.end_position()));
                } else if Some(capture.index) == config.close_capture_ix {
                    close_point = Some(Point::from_ts_point(capture.node.start_position()));
                }
            }
        }

        if buffer_ranges.is_empty() {
            return None;
        }
        let source_range_for_text =
            buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;

        // Assemble the display text, recording highlight and name sub-ranges
        // as chunks are appended.
        let mut text = String::new();
        let mut highlight_ranges = Vec::new();
        let mut name_ranges = Vec::new();
        let mut chunks = self.chunks(
            source_range_for_text.clone(),
            LanguageAwareStyling {
                tree_sitter: true,
                diagnostics: true,
            },
        );
        let mut last_buffer_range_end = 0;
        for (buffer_range, is_name) in buffer_ranges {
            // Separate non-adjacent captures with a single space.
            let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
            if space_added {
                text.push(' ');
            }
            let before_append_len = text.len();
            let mut offset = buffer_range.start;
            chunks.seek(buffer_range.clone());
            for mut chunk in chunks.by_ref() {
                // Clamp the chunk to the end of this capture's range.
                if chunk.text.len() > buffer_range.end - offset {
                    chunk.text = &chunk.text[0..(buffer_range.end - offset)];
                    offset = buffer_range.end;
                } else {
                    offset += chunk.text.len();
                }
                let style = chunk
                    .syntax_highlight_id
                    .zip(theme)
                    .and_then(|(highlight, theme)| theme.get(highlight).cloned());

                if let Some(style) = style {
                    let start = text.len();
                    let end = start + chunk.text.len();
                    highlight_ranges.push((start..end, style));
                }
                text.push_str(chunk.text);
                if offset >= buffer_range.end {
                    break;
                }
            }
            if is_name {
                let after_append_len = text.len();
                // Include the separating space when joining consecutive name captures.
                let start = if space_added && !name_ranges.is_empty() {
                    before_append_len - 1
                } else {
                    before_append_len
                };
                name_ranges.push(start..after_append_len);
            }
            last_buffer_range_end = buffer_range.end;
        }

        Some(OutlineItem {
            depth: 0, // We'll calculate the depth later
            range: item_point_range,
            source_range_for_text: source_range_for_text.to_point(self),
            text,
            highlight_ranges,
            name_ranges,
            body_range: open_point.zip(close_point).map(|(start, end)| start..end),
            annotation_range: None,
        })
    }
4425
4426 pub fn function_body_fold_ranges<T: ToOffset>(
4427 &self,
4428 within: Range<T>,
4429 ) -> impl Iterator<Item = Range<usize>> + '_ {
4430 self.text_object_ranges(within, TreeSitterOptions::default())
4431 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4432 }
4433
    /// For each grammar in the language, runs the provided
    /// [`tree_sitter::Query`] against the given range.
    ///
    /// `query` selects which query to run for a given grammar; grammars for
    /// which it returns `None` presumably contribute no matches.
    pub fn matches(
        &self,
        range: Range<usize>,
        query: fn(&Grammar) -> Option<&tree_sitter::Query>,
    ) -> SyntaxMapMatches<'_> {
        self.syntax.matches(range, self, query)
    }
4443
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
    /// Hence, may return more bracket pairs than the range contains.
    ///
    /// Will omit known chunks.
    /// The resulting bracket match collections are not ordered.
    pub fn fetch_bracket_ranges(
        &self,
        range: Range<usize>,
        known_chunks: Option<&HashSet<Range<BufferRow>>>,
    ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
        let mut all_bracket_matches = HashMap::default();

        for chunk in self
            .tree_sitter_data
            .chunks
            .applicable_chunks(&[range.to_point(self)])
        {
            // Skip chunks the caller already has results for.
            if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
                continue;
            }
            let chunk_range = chunk.anchor_range();
            let chunk_range = chunk_range.to_offset(&self);

            // Reuse previously computed brackets for this chunk when cached.
            if let Some(cached_brackets) =
                &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
            {
                all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
                continue;
            }

            // (bracket match, pattern index, rainbow_exclude) triples for this chunk.
            let mut all_brackets: Vec<(BracketMatch<usize>, usize, bool)> = Vec::new();
            let mut opens = Vec::new();
            let mut color_pairs = Vec::new();

            let mut matches = self.syntax.matches_with_options(
                chunk_range.clone(),
                &self.text,
                TreeSitterOptions {
                    max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
                    max_start_depth: None,
                },
                |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
            );
            // Unwrap is safe: only grammars with a brackets config yield matches here.
            let configs = matches
                .grammars()
                .iter()
                .map(|grammar| grammar.brackets_config.as_ref().unwrap())
                .collect::<Vec<_>>();

            // Group matches by open range so we can either trust grammar output
            // or repair it by picking a single closest close per open.
            let mut open_to_close_ranges = BTreeMap::new();
            while let Some(mat) = matches.peek() {
                let mut open = None;
                let mut close = None;
                let syntax_layer_depth = mat.depth;
                let pattern_index = mat.pattern_index;
                let config = configs[mat.grammar_index];
                let pattern = &config.patterns[pattern_index];
                for capture in mat.captures {
                    if capture.index == config.open_capture_ix {
                        open = Some(capture.node.byte_range());
                    } else if capture.index == config.close_capture_ix {
                        close = Some(capture.node.byte_range());
                    }
                }

                matches.advance();

                // A usable match needs both an open and a close capture.
                let Some((open_range, close_range)) = open.zip(close) else {
                    continue;
                };

                let bracket_range = open_range.start..=close_range.end;
                if !bracket_range.overlaps(&chunk_range) {
                    continue;
                }

                open_to_close_ranges
                    .entry((open_range.start, open_range.end, pattern_index))
                    .or_insert_with(BTreeMap::new)
                    .insert(
                        (close_range.start, close_range.end),
                        BracketMatch {
                            open_range: open_range.clone(),
                            close_range: close_range.clone(),
                            syntax_layer_depth,
                            newline_only: pattern.newline_only,
                            color_index: None,
                        },
                    );

                all_brackets.push((
                    BracketMatch {
                        open_range,
                        close_range,
                        syntax_layer_depth,
                        newline_only: pattern.newline_only,
                        color_index: None,
                    },
                    pattern_index,
                    pattern.rainbow_exclude,
                ));
            }

            // One open paired with multiple closes means the grammar output
            // cannot be trusted as-is and needs repairing.
            let has_bogus_matches = open_to_close_ranges
                .iter()
                .any(|(_, end_ranges)| end_ranges.len() > 1);
            if has_bogus_matches {
                // Grammar is producing bogus matches where one open is paired with multiple
                // closes. Build a valid stack by walking through positions in order.
                // For each close, we know the expected open_len from tree-sitter matches.

                // Map each close to its expected open length (for inferring opens)
                let close_to_open_len: HashMap<(usize, usize, usize), usize> = all_brackets
                    .iter()
                    .map(|(bracket_match, pattern_index, _)| {
                        (
                            (
                                bracket_match.close_range.start,
                                bracket_match.close_range.end,
                                *pattern_index,
                            ),
                            bracket_match.open_range.len(),
                        )
                    })
                    .collect();

                // Collect unique opens and closes within this chunk
                let mut unique_opens: HashSet<(usize, usize, usize)> = all_brackets
                    .iter()
                    .map(|(bracket_match, pattern_index, _)| {
                        (
                            bracket_match.open_range.start,
                            bracket_match.open_range.end,
                            *pattern_index,
                        )
                    })
                    .filter(|(start, _, _)| chunk_range.contains(start))
                    .collect();

                let mut unique_closes: Vec<(usize, usize, usize)> = all_brackets
                    .iter()
                    .map(|(bracket_match, pattern_index, _)| {
                        (
                            bracket_match.close_range.start,
                            bracket_match.close_range.end,
                            *pattern_index,
                        )
                    })
                    .filter(|(start, _, _)| chunk_range.contains(start))
                    .collect();
                unique_closes.sort();
                unique_closes.dedup();

                // Build valid pairs by walking through closes in order
                let mut unique_opens_vec: Vec<_> = unique_opens.iter().copied().collect();
                unique_opens_vec.sort();

                let mut valid_pairs: HashSet<((usize, usize, usize), (usize, usize, usize))> =
                    HashSet::default();
                // One stack of pending opens per pattern index.
                let mut open_stacks: HashMap<usize, Vec<(usize, usize)>> = HashMap::default();
                let mut open_idx = 0;

                for close in &unique_closes {
                    // Push all opens before this close onto stack
                    while open_idx < unique_opens_vec.len()
                        && unique_opens_vec[open_idx].0 < close.0
                    {
                        let (start, end, pattern_index) = unique_opens_vec[open_idx];
                        open_stacks
                            .entry(pattern_index)
                            .or_default()
                            .push((start, end));
                        open_idx += 1;
                    }

                    // Try to match with most recent open
                    let (close_start, close_end, pattern_index) = *close;
                    if let Some(open) = open_stacks
                        .get_mut(&pattern_index)
                        .and_then(|open_stack| open_stack.pop())
                    {
                        valid_pairs.insert(((open.0, open.1, pattern_index), *close));
                    } else if let Some(&open_len) = close_to_open_len.get(close) {
                        // No open on stack - infer one based on expected open_len
                        if close_start >= open_len {
                            let inferred = (close_start - open_len, close_start, pattern_index);
                            unique_opens.insert(inferred);
                            valid_pairs.insert((inferred, *close));
                            all_brackets.push((
                                BracketMatch {
                                    open_range: inferred.0..inferred.1,
                                    close_range: close_start..close_end,
                                    newline_only: false,
                                    syntax_layer_depth: 0,
                                    color_index: None,
                                },
                                pattern_index,
                                false,
                            ));
                        }
                    }
                }

                // Drop every match that didn't survive the repaired pairing.
                all_brackets.retain(|(bracket_match, pattern_index, _)| {
                    let open = (
                        bracket_match.open_range.start,
                        bracket_match.open_range.end,
                        *pattern_index,
                    );
                    let close = (
                        bracket_match.close_range.start,
                        bracket_match.close_range.end,
                        *pattern_index,
                    );
                    valid_pairs.contains(&(open, close))
                });
            }

            let mut all_brackets = all_brackets
                .into_iter()
                .enumerate()
                .map(|(index, (bracket_match, _, rainbow_exclude))| {
                    // Certain languages have "brackets" that are not brackets, e.g. tags. and such
                    // bracket will match the entire tag with all text inside.
                    // For now, avoid highlighting any pair that has more than single char in each bracket.
                    // We need to colorize `<Element/>` bracket pairs, so cannot make this check stricter.
                    let should_color = !rainbow_exclude
                        && (bracket_match.open_range.len() == 1
                            || bracket_match.close_range.len() == 1);
                    if should_color {
                        opens.push(bracket_match.open_range.clone());
                        color_pairs.push((
                            bracket_match.open_range.clone(),
                            bracket_match.close_range.clone(),
                            index,
                        ));
                    }
                    bracket_match
                })
                .collect::<Vec<_>>();

            opens.sort_by_key(|r| (r.start, r.end));
            opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
            // Process pairs in order of their closing position so the stack
            // simulation below mirrors actual nesting.
            color_pairs.sort_by_key(|(_, close, _)| close.end);

            // Assign rainbow color depths by simulating a bracket stack over
            // the colorable opens.
            let mut open_stack = Vec::new();
            let mut open_index = 0;
            for (open, close, index) in color_pairs {
                while open_index < opens.len() && opens[open_index].start < close.start {
                    open_stack.push(opens[open_index].clone());
                    open_index += 1;
                }

                if open_stack.last() == Some(&open) {
                    let depth_index = open_stack.len() - 1;
                    all_brackets[index].color_index = Some(depth_index);
                    open_stack.pop();
                }
            }

            all_brackets.sort_by_key(|bracket_match| {
                (bracket_match.open_range.start, bracket_match.open_range.end)
            });

            // Populate the cache slot only if it is still empty.
            if let empty_slot @ None =
                &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
            {
                *empty_slot = Some(all_brackets.clone());
            }
            all_bracket_matches.insert(chunk.row_range(), all_brackets);
        }

        all_bracket_matches
    }
4720
4721 pub fn all_bracket_ranges(
4722 &self,
4723 range: Range<usize>,
4724 ) -> impl Iterator<Item = BracketMatch<usize>> {
4725 self.fetch_bracket_ranges(range.clone(), None)
4726 .into_values()
4727 .flatten()
4728 .filter(move |bracket_match| {
4729 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4730 bracket_range.overlaps(&range)
4731 })
4732 }
4733
4734 /// Returns bracket range pairs overlapping or adjacent to `range`
4735 pub fn bracket_ranges<T: ToOffset>(
4736 &self,
4737 range: Range<T>,
4738 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4739 // Find bracket pairs that *inclusively* contain the given range.
4740 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4741 self.all_bracket_ranges(range)
4742 .filter(|pair| !pair.newline_only)
4743 }
4744
    /// Runs each grammar's debugger-variables query over `range`, lazily
    /// yielding `(byte range, DebuggerTextObject)` pairs that overlap it.
    ///
    /// Within a single query match, captures of the same text object are
    /// coalesced into one covering range.
    pub fn debug_variables_query<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
        // NOTE(review): unlike `text_object_ranges`, the end offset here is
        // not clamped to `self.len()` — confirm whether that is intentional.
        let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);

        let mut matches = self.syntax.matches_with_options(
            range.clone(),
            &self.text,
            TreeSitterOptions::default(),
            |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
        );

        let configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.debug_variables_config.as_ref())
            .collect::<Vec<_>>();

        // Captures buffered from the current match, drained one per iteration.
        let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();

        iter::from_fn(move || {
            loop {
                // Drain buffered captures first, skipping any outside the range.
                while let Some(capture) = captures.pop() {
                    if capture.0.overlaps(&range) {
                        return Some(capture);
                    }
                }

                let mat = matches.peek()?;

                // Skip grammars without a debug-variables config.
                let Some(config) = configs[mat.grammar_index].as_ref() else {
                    matches.advance();
                    continue;
                };

                for capture in mat.captures {
                    let Some(ix) = config
                        .objects_by_capture_ix
                        .binary_search_by_key(&capture.index, |e| e.0)
                        .ok()
                    else {
                        continue;
                    };
                    let text_object = config.objects_by_capture_ix[ix].1;
                    let byte_range = capture.node.byte_range();

                    // Merge with an already-buffered capture of the same object.
                    let mut found = false;
                    for (range, existing) in captures.iter_mut() {
                        if existing == &text_object {
                            range.start = range.start.min(byte_range.start);
                            range.end = range.end.max(byte_range.end);
                            found = true;
                            break;
                        }
                    }

                    if !found {
                        captures.push((byte_range, text_object));
                    }
                }

                matches.advance();
            }
        })
    }
4811
    /// Runs each grammar's text-object query over `range`, lazily yielding
    /// `(byte range, TextObject)` pairs that overlap it.
    ///
    /// Within a single query match, captures of the same text object are
    /// coalesced into one covering range.
    pub fn text_object_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
        options: TreeSitterOptions,
    ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
        // Expand by one position on each side, clamping the end to the buffer length.
        let range =
            range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));

        let mut matches =
            self.syntax
                .matches_with_options(range.clone(), &self.text, options, |grammar| {
                    grammar.text_object_config.as_ref().map(|c| &c.query)
                });

        let configs = matches
            .grammars()
            .iter()
            .map(|grammar| grammar.text_object_config.as_ref())
            .collect::<Vec<_>>();

        // Captures buffered from the current match, drained one per iteration.
        let mut captures = Vec::<(Range<usize>, TextObject)>::new();

        iter::from_fn(move || {
            loop {
                // Drain buffered captures first, skipping any outside the range.
                while let Some(capture) = captures.pop() {
                    if capture.0.overlaps(&range) {
                        return Some(capture);
                    }
                }

                let mat = matches.peek()?;

                // Skip grammars without a text-object config.
                let Some(config) = configs[mat.grammar_index].as_ref() else {
                    matches.advance();
                    continue;
                };

                for capture in mat.captures {
                    let Some(ix) = config
                        .text_objects_by_capture_ix
                        .binary_search_by_key(&capture.index, |e| e.0)
                        .ok()
                    else {
                        continue;
                    };
                    let text_object = config.text_objects_by_capture_ix[ix].1;
                    let byte_range = capture.node.byte_range();

                    // Merge with an already-buffered capture of the same object.
                    let mut found = false;
                    for (range, existing) in captures.iter_mut() {
                        if existing == &text_object {
                            range.start = range.start.min(byte_range.start);
                            range.end = range.end.max(byte_range.end);
                            found = true;
                            break;
                        }
                    }

                    if !found {
                        captures.push((byte_range, text_object));
                    }
                }

                matches.advance();
            }
        })
    }
4879
4880 /// Returns enclosing bracket ranges containing the given range
4881 pub fn enclosing_bracket_ranges<T: ToOffset>(
4882 &self,
4883 range: Range<T>,
4884 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4885 let range = range.start.to_offset(self)..range.end.to_offset(self);
4886
4887 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4888 let max_depth = result
4889 .iter()
4890 .map(|mat| mat.syntax_layer_depth)
4891 .max()
4892 .unwrap_or(0);
4893 result.into_iter().filter(move |pair| {
4894 pair.open_range.start <= range.start
4895 && pair.close_range.end >= range.end
4896 && pair.syntax_layer_depth == max_depth
4897 })
4898 }
4899
4900 /// Returns the smallest enclosing bracket ranges containing the given range or None if no brackets contain range
4901 ///
4902 /// Can optionally pass a range_filter to filter the ranges of brackets to consider
4903 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4904 &self,
4905 range: Range<T>,
4906 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4907 ) -> Option<(Range<usize>, Range<usize>)> {
4908 let range = range.start.to_offset(self)..range.end.to_offset(self);
4909
4910 // Get the ranges of the innermost pair of brackets.
4911 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4912
4913 for pair in self.enclosing_bracket_ranges(range) {
4914 if let Some(range_filter) = range_filter
4915 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4916 {
4917 continue;
4918 }
4919
4920 let len = pair.close_range.end - pair.open_range.start;
4921
4922 if let Some((existing_open, existing_close)) = &result {
4923 let existing_len = existing_close.end - existing_open.start;
4924 if len > existing_len {
4925 continue;
4926 }
4927 }
4928
4929 result = Some((pair.open_range, pair.close_range));
4930 }
4931
4932 result
4933 }
4934
    /// Returns anchor ranges for any matches of the redaction query.
    /// The buffer can be associated with multiple languages, and the redaction query associated with each
    /// will be run on the relevant section of the buffer.
    pub fn redacted_ranges<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = Range<usize>> + '_ {
        let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
            grammar
                .redactions_config
                .as_ref()
                .map(|config| &config.query)
        });

        let configs = syntax_matches
            .grammars()
            .iter()
            .map(|grammar| grammar.redactions_config.as_ref())
            .collect::<Vec<_>>();

        // Lazily yield one redacted byte range per syntax match; matches
        // without a redaction capture yield `None` for that step but still
        // advance the iterator.
        iter::from_fn(move || {
            let redacted_range = syntax_matches
                .peek()
                .and_then(|mat| {
                    configs[mat.grammar_index].and_then(|config| {
                        mat.captures
                            .iter()
                            .find(|capture| capture.index == config.redaction_capture_ix)
                    })
                })
                .map(|mat| mat.node.byte_range());
            syntax_matches.advance();
            redacted_range
        })
    }
4971
    /// Returns the language-injection content ranges (e.g. embedded code
    /// blocks) intersecting `range`, paired with the injected [`Language`].
    ///
    /// Matches whose injected language cannot be resolved are skipped for
    /// that step but the iterator still advances.
    pub fn injections_intersecting_range<T: ToOffset>(
        &self,
        range: Range<T>,
    ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
        let offset_range = range.start.to_offset(self)..range.end.to_offset(self);

        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
            grammar
                .injection_config
                .as_ref()
                .map(|config| &config.query)
        });

        let configs = syntax_matches
            .grammars()
            .iter()
            .map(|grammar| grammar.injection_config.as_ref())
            .collect::<Vec<_>>();

        iter::from_fn(move || {
            let ranges = syntax_matches.peek().and_then(|mat| {
                let config = &configs[mat.grammar_index]?;
                // Locate the capture holding the injected content.
                let content_capture_range = mat.captures.iter().find_map(|capture| {
                    if capture.index == config.content_capture_ix {
                        Some(capture.node.byte_range())
                    } else {
                        None
                    }
                })?;
                // Resolve the language in effect at the content's start.
                let language = self.language_at(content_capture_range.start)?;
                Some((content_capture_range, language))
            });
            syntax_matches.advance();
            ranges
        })
    }
5008
    /// Returns the runnables whose syntax matches intersect the given offset
    /// range, in match order.
    ///
    /// A match only yields a [`RunnableRange`] when it contains a
    /// [`RunnableCapture::Run`] capture; other matches are skipped.
    pub fn runnable_ranges(
        &self,
        offset_range: Range<usize>,
    ) -> impl Iterator<Item = RunnableRange> + '_ {
        let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
            grammar.runnable_config.as_ref().map(|config| &config.query)
        });

        // Per-grammar runnable configs, indexed by each match's grammar index.
        let test_configs = syntax_matches
            .grammars()
            .iter()
            .map(|grammar| grammar.runnable_config.as_ref())
            .collect::<Vec<_>>();

        iter::from_fn(move || {
            loop {
                let mat = syntax_matches.peek()?;

                let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
                    // Filled in as a side effect while scanning the captures below.
                    let mut run_range = None;
                    // Union of all capture ranges in this match.
                    let full_range = mat.captures.iter().fold(
                        Range {
                            start: usize::MAX,
                            end: 0,
                        },
                        |mut acc, next| {
                            let byte_range = next.node.byte_range();
                            if acc.start > byte_range.start {
                                acc.start = byte_range.start;
                            }
                            if acc.end < byte_range.end {
                                acc.end = byte_range.end;
                            }
                            acc
                        },
                    );
                    if full_range.start > full_range.end {
                        // We did not find a full spanning range of this match.
                        return None;
                    }
                    // Collect named extra captures, noting the run capture's
                    // range as a side effect of the same scan.
                    let extra_captures: SmallVec<[_; 1]> =
                        SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
                            test_configs
                                .extra_captures
                                .get(capture.index as usize)
                                .cloned()
                                .and_then(|tag_name| match tag_name {
                                    RunnableCapture::Named(name) => {
                                        Some((capture.node.byte_range(), name))
                                    }
                                    RunnableCapture::Run => {
                                        let _ = run_range.insert(capture.node.byte_range());
                                        None
                                    }
                                })
                        }));
                    // A match without a run capture is not runnable.
                    let run_range = run_range?;
                    // Tags come from pattern properties whose key is "tag".
                    let tags = test_configs
                        .query
                        .property_settings(mat.pattern_index)
                        .iter()
                        .filter_map(|property| {
                            if *property.key == *"tag" {
                                property
                                    .value
                                    .as_ref()
                                    .map(|value| RunnableTag(value.to_string().into()))
                            } else {
                                None
                            }
                        })
                        .collect();
                    // Resolve each named capture's range to its text content.
                    let extra_captures = extra_captures
                        .into_iter()
                        .map(|(range, name)| {
                            (
                                name.to_string(),
                                self.text_for_range(range).collect::<String>(),
                            )
                        })
                        .collect();
                    // All tags should have the same range.
                    Some(RunnableRange {
                        run_range,
                        full_range,
                        runnable: Runnable {
                            tags,
                            language: mat.language,
                            buffer: self.remote_id(),
                        },
                        extra_captures,
                        buffer_id: self.remote_id(),
                    })
                });

                syntax_matches.advance();
                if test_range.is_some() {
                    // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
                    // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
                    return test_range;
                }
            }
        })
    }
5113
    /// Returns selections for remote peers intersecting the given range.
    ///
    /// Each yielded item is `(replica_id, line_mode, cursor_shape, selections)`
    /// for one peer whose selection set is non-empty. When `include_local` is
    /// false, the local replica's selections are skipped.
    #[allow(clippy::type_complexity)]
    pub fn selections_in_range(
        &self,
        range: Range<Anchor>,
        include_local: bool,
    ) -> impl Iterator<
        Item = (
            ReplicaId,
            bool,
            CursorShape,
            impl Iterator<Item = &Selection<Anchor>> + '_,
        ),
    > + '_ {
        self.remote_selections
            .iter()
            .filter(move |(replica_id, set)| {
                (include_local || **replica_id != self.text.replica_id())
                    && !set.selections.is_empty()
            })
            .map(move |(replica_id, set)| {
                // The binary searches below rely on each set's selections being
                // sorted by position. Forcing the comparator away from `Equal`
                // makes the search return a partition point: `start_ix` is the
                // first selection whose end is at or past `range.start`.
                let start_ix = match set.selections.binary_search_by(|probe| {
                    probe.end.cmp(&range.start, self).then(Ordering::Greater)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };
                // `end_ix` is the first selection whose start is past `range.end`.
                let end_ix = match set.selections.binary_search_by(|probe| {
                    probe.start.cmp(&range.end, self).then(Ordering::Less)
                }) {
                    Ok(ix) | Err(ix) => ix,
                };

                (
                    *replica_id,
                    set.line_mode,
                    set.cursor_shape,
                    set.selections[start_ix..end_ix].iter(),
                )
            })
    }
5154
5155 /// Returns if the buffer contains any diagnostics.
5156 pub fn has_diagnostics(&self) -> bool {
5157 !self.diagnostics.is_empty()
5158 }
5159
    /// Returns all the diagnostics intersecting the given range.
    ///
    /// Entries from every language server's diagnostic set are merged into a
    /// single sequence, ordered by start position, then severity, then group
    /// id (with the whole order reversed when `reversed` is set).
    pub fn diagnostics_in_range<'a, T, O>(
        &'a self,
        search_range: Range<T>,
        reversed: bool,
    ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
    where
        T: 'a + Clone + ToOffset,
        O: 'a + FromAnchor,
    {
        // One peekable cursor per language server's diagnostic set.
        let mut iterators: Vec<_> = self
            .diagnostics
            .iter()
            .map(|(_, collection)| {
                collection
                    .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
                    .peekable()
            })
            .collect();

        // K-way merge: repeatedly emit from whichever cursor's head entry
        // sorts first under the comparator below.
        std::iter::from_fn(move || {
            let (next_ix, _) = iterators
                .iter_mut()
                .enumerate()
                .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
                .min_by(|(_, a), (_, b)| {
                    let cmp = a
                        .range
                        .start
                        .cmp(&b.range.start, self)
                        // when range is equal, sort by diagnostic severity
                        .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
                        // and stabilize order with group_id
                        .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
                    if reversed { cmp.reverse() } else { cmp }
                })?;
            // Resolve the winning entry's anchors into the requested coordinates.
            iterators[next_ix]
                .next()
                .map(
                    |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
                        diagnostic,
                        range: FromAnchor::from_anchor(&range.start, self)
                            ..FromAnchor::from_anchor(&range.end, self),
                    },
                )
        })
    }
5207
5208 /// Returns all the diagnostic groups associated with the given
5209 /// language server ID. If no language server ID is provided,
5210 /// all diagnostics groups are returned.
5211 pub fn diagnostic_groups(
5212 &self,
5213 language_server_id: Option<LanguageServerId>,
5214 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5215 let mut groups = Vec::new();
5216
5217 if let Some(language_server_id) = language_server_id {
5218 if let Some(set) = self.diagnostics.get(&language_server_id) {
5219 set.groups(language_server_id, &mut groups, self);
5220 }
5221 } else {
5222 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5223 diagnostics.groups(*language_server_id, &mut groups, self);
5224 }
5225 }
5226
5227 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5228 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5229 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5230 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5231 });
5232
5233 groups
5234 }
5235
    /// Returns an iterator over the diagnostics for the given group.
    ///
    /// Every language server's diagnostic set is consulted, since the group
    /// id is looked up in each set independently.
    pub fn diagnostic_group<O>(
        &self,
        group_id: usize,
    ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
    where
        O: FromAnchor + 'static,
    {
        self.diagnostics
            .iter()
            .flat_map(move |(_, set)| set.group(group_id, self))
    }
5248
    /// An integer version number that accounts for all updates besides
    /// the buffer's text itself (which is versioned via a version vector).
    ///
    /// Compare values from two snapshots to cheaply detect non-text changes.
    pub fn non_text_state_update_count(&self) -> usize {
        self.non_text_state_update_count
    }
5254
    /// An integer version that changes when the buffer's syntax changes.
    ///
    /// Delegates to the syntax map's own update counter.
    pub fn syntax_update_count(&self) -> usize {
        self.syntax.update_count()
    }
5259
    /// Returns a snapshot of underlying file.
    ///
    /// `None` when the buffer is not backed by a file.
    pub fn file(&self) -> Option<&Arc<dyn File>> {
        self.file.as_ref()
    }
5264
5265 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5266 if let Some(file) = self.file() {
5267 if file.path().file_name().is_none() || include_root {
5268 Some(file.full_path(cx).to_string_lossy().into_owned())
5269 } else {
5270 Some(file.path().display(file.path_style(cx)).to_string())
5271 }
5272 } else {
5273 None
5274 }
5275 }
5276
    /// Collects the distinct words inside `query.range`, mapped to their
    /// anchor ranges.
    ///
    /// When `query.fuzzy_contents` is set, a word is only included if all of
    /// the query's characters occur in it, in order, case-insensitively.
    /// When `query.skip_digits` is set, words starting with a digit are
    /// skipped.
    pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
        let query_str = query.fuzzy_contents;
        // An empty fuzzy query can never match anything.
        if query_str.is_some_and(|query| query.is_empty()) {
            return BTreeMap::default();
        }

        // Word-boundary detection respects the buffer's language, if any.
        let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
            language,
            override_id: None,
        }));

        // `query_ix` counts how many query characters have been matched, in
        // order, within the current word; it is reset at each word boundary.
        let mut query_ix = 0;
        let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
        let query_len = query_chars.as_ref().map_or(0, |query| query.len());

        let mut words = BTreeMap::default();
        let mut current_word_start_ix = None;
        let mut chunk_ix = query.range.start;
        for chunk in self.chunks(
            query.range,
            LanguageAwareStyling {
                tree_sitter: false,
                diagnostics: false,
            },
        ) {
            for (i, c) in chunk.text.char_indices() {
                let ix = chunk_ix + i;
                if classifier.is_word(c) {
                    if current_word_start_ix.is_none() {
                        current_word_start_ix = Some(ix);
                    }

                    // Advance the fuzzy matcher when this character matches
                    // the next pending query character (case-insensitively).
                    if let Some(query_chars) = &query_chars
                        && query_ix < query_len
                        && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
                    {
                        query_ix += 1;
                    }
                    continue;
                } else if let Some(word_start) = current_word_start_ix.take()
                    && query_ix == query_len
                {
                    // A word just ended and the full fuzzy query was matched.
                    // NOTE(review): a word that runs to the very end of the
                    // range is never emitted, since emission only happens on a
                    // following non-word character — confirm this is intended.
                    let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
                    let mut word_text = self.text_for_range(word_start..ix).peekable();
                    let first_char = word_text
                        .peek()
                        .and_then(|first_chunk| first_chunk.chars().next());
                    // Skip empty and "words" starting with digits as a heuristic to reduce useless completions
                    if !query.skip_digits
                        || first_char.is_none_or(|first_char| !first_char.is_digit(10))
                    {
                        words.insert(word_text.collect(), word_range);
                    }
                }
                query_ix = 0;
            }
            chunk_ix += chunk.text.len();
        }

        words
    }
5338}
5339
/// A configuration to use when producing styled text chunks.
///
/// Disable both flags to get plain, unstyled chunks.
#[derive(Clone, Copy)]
pub struct LanguageAwareStyling {
    /// Whether to highlight text chunks using tree-sitter.
    pub tree_sitter: bool,
    /// Whether to highlight text chunks based on the diagnostics data.
    pub diagnostics: bool,
}
5348
/// Parameters for the `words_in_range` word-collection query.
pub struct WordsQuery<'a> {
    /// Only returns words with all chars from the fuzzy string in them.
    pub fuzzy_contents: Option<&'a str>,
    /// Skips words that start with a digit.
    pub skip_digits: bool,
    /// Buffer offset range, to look for words.
    pub range: Range<usize>,
}
5357
5358fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5359 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5360}
5361
5362fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5363 let mut result = IndentSize::spaces(0);
5364 for c in text {
5365 let kind = match c {
5366 ' ' => IndentKind::Space,
5367 '\t' => IndentKind::Tab,
5368 _ => break,
5369 };
5370 if result.len == 0 {
5371 result.kind = kind;
5372 }
5373 result.len += 1;
5374 }
5375 result
5376}
5377
impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        // Field-by-field clone of every snapshot component.
        Self {
            text: self.text.clone(),
            syntax: self.syntax.clone(),
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            tree_sitter_data: self.tree_sitter_data.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
            capability: self.capability,
            modeline: self.modeline.clone(),
        }
    }
}
5394
impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    // Delegates to the underlying text snapshot so its methods are available
    // directly on `BufferSnapshot`.
    fn deref(&self) -> &Self::Target {
        &self.text
    }
}
5402
// SAFETY: presumably sound because the borrowed capture/highlight state in a
// `BufferChunks` is only accessed through `&mut self` by whichever thread owns
// the iterator — TODO(review): document the exact invariant that makes this
// `Send` implementation valid.
unsafe impl Send for BufferChunks<'_> {}
5404
impl<'a> BufferChunks<'a> {
    /// Creates a chunk iterator over `range` within `text`.
    ///
    /// When `syntax` is provided, chunks are annotated with highlight ids
    /// derived from the captures and per-grammar highlight maps. When
    /// `diagnostics` is true, diagnostic endpoints are loaded from
    /// `buffer_snapshot` so chunks carry severity information.
    pub(crate) fn new(
        text: &'a Rope,
        range: Range<usize>,
        syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
        diagnostics: bool,
        buffer_snapshot: Option<&'a BufferSnapshot>,
    ) -> Self {
        let mut highlights = None;
        if let Some((captures, highlight_maps)) = syntax {
            highlights = Some(BufferChunkHighlights {
                captures,
                next_capture: None,
                stack: Default::default(),
                highlight_maps,
            })
        }

        // Start with an empty endpoint list; the real endpoints are filled in
        // by `initialize_diagnostic_endpoints` below.
        let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
        let chunks = text.chunks_in_range(range.clone());

        let mut this = BufferChunks {
            range,
            buffer_snapshot,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            unnecessary_depth: 0,
            underline: true,
            highlights,
        };
        this.initialize_diagnostic_endpoints();
        this
    }

    /// Seeks to the given byte offset in the buffer.
    pub fn seek(&mut self, range: Range<usize>) {
        let old_range = std::mem::replace(&mut self.range, range.clone());
        self.chunks.set_range(self.range.clone());
        if let Some(highlights) = self.highlights.as_mut() {
            if old_range.start <= self.range.start && old_range.end >= self.range.end {
                // Reuse existing highlights stack, as the new range is a subrange of the old one.
                highlights
                    .stack
                    .retain(|(end_offset, _)| *end_offset > range.start);
                // If the pending capture already covers the new start, push it
                // onto the stack instead of re-querying.
                if let Some(capture) = &highlights.next_capture
                    && range.start >= capture.node.start_byte()
                {
                    let next_capture_end = capture.node.end_byte();
                    if range.start < next_capture_end
                        && let Some(capture_id) =
                            highlights.highlight_maps[capture.grammar_index].get(capture.index)
                    {
                        highlights.stack.push((next_capture_end, capture_id));
                    }
                    highlights.next_capture.take();
                }
            } else if let Some(snapshot) = self.buffer_snapshot {
                // Not a subrange: rebuild the capture state from the snapshot.
                let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
                *highlights = BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_maps,
                };
            } else {
                // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
                // Seeking such BufferChunks is not supported.
                debug_assert!(
                    false,
                    "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
                );
            }

            highlights.captures.set_byte_range(self.range.clone());
            self.initialize_diagnostic_endpoints();
        }
    }

    /// Rebuilds the sorted list of diagnostic start/end offsets that overlap
    /// the current range, and resets the severity depth counters.
    fn initialize_diagnostic_endpoints(&mut self) {
        if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
            && let Some(buffer) = self.buffer_snapshot
        {
            let mut diagnostic_endpoints = Vec::new();
            for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
            }
            // Sort by offset; at equal offsets, start endpoints sort before
            // end endpoints (`!is_start` is false for starts).
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
            *diagnostics = diagnostic_endpoints.into_iter().peekable();
            // NOTE(review): `unnecessary_depth` is not reset here, unlike the
            // four severity depths — confirm whether that is intentional.
            self.hint_depth = 0;
            self.error_depth = 0;
            self.warning_depth = 0;
            self.information_depth = 0;
        }
    }

    /// The current byte offset in the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    /// The byte range this iterator has yet to produce chunks for.
    pub fn range(&self) -> Range<usize> {
        self.range.clone()
    }

    /// Adjusts the per-severity nesting depths when a diagnostic endpoint is
    /// crossed: incremented at diagnostic starts, decremented at ends.
    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            // Other severities do not affect chunk annotations.
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    /// The most severe diagnostic level active at the current position, if any.
    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    /// Whether the current position is inside a diagnostic marked "unnecessary".
    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}
5568
impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    /// Produces the next chunk: a maximal slice of text over which the active
    /// highlight and diagnostic state is constant.
    fn next(&mut self) -> Option<Self::Item> {
        // Offsets at which the chunk must be cut because highlight or
        // diagnostic state changes there.
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            // Drop highlight scopes that ended at or before the current position.
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            // Push every capture that has already begun onto the stack; stop
            // at the first capture starting beyond the current position.
            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    if let Some(highlight_id) = highlight_id {
                        highlights
                            .stack
                            .push((capture.node.end_byte(), highlight_id));
                    }
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        // Consume diagnostic endpoints up to the current position, updating
        // the depth counters; remember where the next endpoint begins.
        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
            newlines,
        }) = self.chunks.peek_with_bitmaps()
        {
            // Cut at whichever boundary comes first: the end of the underlying
            // text chunk, the next capture, the next diagnostic endpoint, or
            // the end of the innermost highlight scope.
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            // Re-base the emitted slice onto the underlying chunk's coordinates.
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            // Shift the per-byte bitmaps so bit 0 corresponds to `bit_start`.
            // NOTE(review): the mask is derived from `bit_end` rather than the
            // slice length (`bit_end - bit_start`), so when `bit_start > 0`
            // some bits beyond the slice survive — confirm consumers only read
            // the first `slice.len()` bits.
            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;
            let newlines = (newlines >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                // The underlying chunk is exhausted; advance to the next one.
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                newlines,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}
5670
impl operation_queue::Operation for Operation {
    /// Returns the timestamp used to order this operation in the deferred
    /// operation queue.
    ///
    /// `Operation::Buffer` is rejected here, matching the stated invariant
    /// that buffer operations are never deferred at this layer.
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}
5692
5693impl IndentSize {
5694 /// Returns an [`IndentSize`] representing the given spaces.
5695 pub fn spaces(len: u32) -> Self {
5696 Self {
5697 len,
5698 kind: IndentKind::Space,
5699 }
5700 }
5701
5702 /// Returns an [`IndentSize`] representing a tab.
5703 pub fn tab() -> Self {
5704 Self {
5705 len: 1,
5706 kind: IndentKind::Tab,
5707 }
5708 }
5709
5710 /// An iterator over the characters represented by this [`IndentSize`].
5711 pub fn chars(&self) -> impl Iterator<Item = char> {
5712 iter::repeat(self.char()).take(self.len as usize)
5713 }
5714
5715 /// The character representation of this [`IndentSize`].
5716 pub fn char(&self) -> char {
5717 match self.kind {
5718 IndentKind::Space => ' ',
5719 IndentKind::Tab => '\t',
5720 }
5721 }
5722
5723 /// Consumes the current [`IndentSize`] and returns a new one that has
5724 /// been shrunk or enlarged by the given size along the given direction.
5725 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5726 match direction {
5727 Ordering::Less => {
5728 if self.kind == size.kind && self.len >= size.len {
5729 self.len -= size.len;
5730 }
5731 }
5732 Ordering::Equal => {}
5733 Ordering::Greater => {
5734 if self.len == 0 {
5735 self = size;
5736 } else if self.kind == size.kind {
5737 self.len += size.len;
5738 }
5739 }
5740 }
5741 self
5742 }
5743
5744 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5745 match self.kind {
5746 IndentKind::Space => self.len as usize,
5747 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5748 }
5749 }
5750}
5751
#[cfg(any(test, feature = "test-support"))]
/// A minimal [`File`] implementation for use in tests.
pub struct TestFile {
    /// Path of the file, relative to its root.
    pub path: Arc<RelPath>,
    /// Name of the root directory the path is joined onto.
    pub root_name: String,
    /// Directory containing `root_name`; when set, this file also acts as a
    /// [`LocalFile`].
    pub local_root: Option<PathBuf>,
}
5758
#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        // The root name joined with the relative path.
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        // Only acts as a local file when a local root directory was provided.
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        // Not needed by the tests that use `TestFile`.
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        // Fall back to the root name when the path has no file name.
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        // Not needed by the tests that use `TestFile`.
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}
5801
#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        // local_root/root_name/path; the unwrap assumes `local_root` is set
        // (see `as_local`, which gates this impl on that field).
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        // Not needed by the tests that use `TestFile`.
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        // Not needed by the tests that use `TestFile`.
        unimplemented!()
    }
}
5818
/// Coalesces consecutive integer values into contiguous ranges, capping each
/// range at `max_len` elements.
pub(crate) fn contiguous_ranges(
    mut values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut pending: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        for value in values.by_ref() {
            // Grow the pending range while values stay contiguous and it is
            // below the length cap.
            if let Some(range) = &mut pending {
                if value == range.end && range.len() < max_len {
                    range.end += 1;
                    continue;
                }
            }

            // Otherwise start a new range, emitting the finished one (if any).
            let finished = pending.replace(value..value + 1);
            if finished.is_some() {
                return finished;
            }
        }
        // Input exhausted: flush the final pending range.
        pending.take()
    })
}
5847
/// Classifies characters as word, whitespace, or punctuation, optionally
/// taking language-specific word characters into account.
#[derive(Default, Debug)]
pub struct CharClassifier {
    // Language scope that may declare extra word characters.
    scope: Option<LanguageScope>,
    // Selects which of the scope's character sets applies (e.g. completion).
    scope_context: Option<CharScopeContext>,
    // When set, punctuation is classified as word characters.
    ignore_punctuation: bool,
}
5854
5855impl CharClassifier {
5856 pub fn new(scope: Option<LanguageScope>) -> Self {
5857 Self {
5858 scope,
5859 scope_context: None,
5860 ignore_punctuation: false,
5861 }
5862 }
5863
5864 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5865 Self {
5866 scope_context,
5867 ..self
5868 }
5869 }
5870
5871 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5872 Self {
5873 ignore_punctuation,
5874 ..self
5875 }
5876 }
5877
5878 pub fn is_whitespace(&self, c: char) -> bool {
5879 self.kind(c) == CharKind::Whitespace
5880 }
5881
5882 pub fn is_word(&self, c: char) -> bool {
5883 self.kind(c) == CharKind::Word
5884 }
5885
5886 pub fn is_punctuation(&self, c: char) -> bool {
5887 self.kind(c) == CharKind::Punctuation
5888 }
5889
5890 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5891 if c.is_alphanumeric() || c == '_' {
5892 return CharKind::Word;
5893 }
5894
5895 if let Some(scope) = &self.scope {
5896 let characters = match self.scope_context {
5897 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5898 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5899 None => scope.word_characters(),
5900 };
5901 if let Some(characters) = characters
5902 && characters.contains(&c)
5903 {
5904 return CharKind::Word;
5905 }
5906 }
5907
5908 if c.is_whitespace() {
5909 return CharKind::Whitespace;
5910 }
5911
5912 if ignore_punctuation {
5913 CharKind::Word
5914 } else {
5915 CharKind::Punctuation
5916 }
5917 }
5918
5919 pub fn kind(&self, c: char) -> CharKind {
5920 self.kind_with(c, self.ignore_punctuation)
5921 }
5922}
5923
/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    // A trailing-whitespace run can straddle rope chunk boundaries, so the
    // in-progress run from the previous chunk is carried over between chunks.
    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            // The first segment of a chunk continues the previous chunk's last
            // line; if it is entirely whitespace, extend the carried-over run.
            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            // Emit the previous line's run now that a newline has ended it.
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            // Advance past this segment plus the '\n' separator.
            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        // The final split segment has no trailing '\n'; undo the separator
        // adjustment before moving on to the next chunk.
        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    // Flush the run from the final line, if any.
    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}