1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, ModelineSettings, Outline, OutlineConfig, PLAIN_TEXT,
5 RunnableCapture, RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::LanguageSettings,
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff_with_offsets,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::Arc,
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
/// Indicates whether a [`Buffer`] has permission to be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
    /// The buffer is a mutable replica, but has been toggled to be read-only.
85 Read,
86 /// The buffer is a read-only replica.
87 ReadOnly,
88}
89
90impl Capability {
91 /// Returns `true` if the capability is `ReadWrite`.
92 pub fn editable(self) -> bool {
93 matches!(self, Capability::ReadWrite)
94 }
95}
96
97pub type BufferRow = u32;
98
99/// An in-memory representation of a source code file, including its text,
100/// syntax trees, git status, and diagnostics.
101pub struct Buffer {
102 text: TextBuffer,
103 branch_state: Option<BufferBranchState>,
104 /// Filesystem state, `None` when there is no path.
105 file: Option<Arc<dyn File>>,
106 /// The mtime of the file when this buffer was last loaded from
107 /// or saved to disk.
108 saved_mtime: Option<MTime>,
109 /// The version vector when this buffer was last loaded from
110 /// or saved to disk.
111 saved_version: clock::Global,
112 preview_version: clock::Global,
113 transaction_depth: usize,
114 was_dirty_before_starting_transaction: Option<bool>,
115 reload_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 autoindent_requests: Vec<Arc<AutoindentRequest>>,
118 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
119 pending_autoindent: Option<Task<()>>,
120 sync_parse_timeout: Option<Duration>,
121 syntax_map: Mutex<SyntaxMap>,
122 reparse: Option<Task<()>>,
123 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
124 non_text_state_update_count: usize,
125 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
126 remote_selections: TreeMap<ReplicaId, SelectionSet>,
127 diagnostics_timestamp: clock::Lamport,
128 completion_triggers: BTreeSet<String>,
129 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
130 completion_triggers_timestamp: clock::Lamport,
131 deferred_ops: OperationQueue<Operation>,
132 capability: Capability,
133 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell's contents are `(self.version, has_changes)` as of the last call.
136 has_unsaved_edits: Cell<(clock::Global, bool)>,
137 change_bits: Vec<rc::Weak<Cell<bool>>>,
138 modeline: Option<Arc<ModelineSettings>>,
139 _subscriptions: Vec<gpui::Subscription>,
140 tree_sitter_data: Arc<TreeSitterData>,
141 encoding: &'static Encoding,
142 has_bom: bool,
143}
144
145#[derive(Debug)]
146pub struct TreeSitterData {
147 chunks: RowChunks,
148 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
149}
150
151const MAX_ROWS_IN_A_CHUNK: u32 = 50;
152
153impl TreeSitterData {
154 fn clear(&mut self, snapshot: text::BufferSnapshot) {
155 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
156 self.brackets_by_chunks.get_mut().clear();
157 self.brackets_by_chunks
158 .get_mut()
159 .resize(self.chunks.len(), None);
160 }
161
162 fn new(snapshot: text::BufferSnapshot) -> Self {
163 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
164 Self {
165 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
166 chunks,
167 }
168 }
169
170 fn version(&self) -> &clock::Global {
171 self.chunks.version()
172 }
173}
174
175#[derive(Copy, Clone, Debug, PartialEq, Eq)]
176pub enum ParseStatus {
177 Idle,
178 Parsing,
179}
180
181struct BufferBranchState {
182 base_buffer: Entity<Buffer>,
183 merged_operations: Vec<Lamport>,
184}
185
186/// An immutable, cheaply cloneable representation of a fixed
187/// state of a buffer.
188pub struct BufferSnapshot {
189 pub text: text::BufferSnapshot,
190 pub syntax: SyntaxSnapshot,
191 file: Option<Arc<dyn File>>,
192 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
193 remote_selections: TreeMap<ReplicaId, SelectionSet>,
194 language: Option<Arc<Language>>,
195 non_text_state_update_count: usize,
196 tree_sitter_data: Arc<TreeSitterData>,
197 pub capability: Capability,
198 modeline: Option<Arc<ModelineSettings>>,
199}
200
201/// The kind and amount of indentation in a particular line. For now,
202/// assumes that indentation is all the same character.
203#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
204pub struct IndentSize {
205 /// The number of bytes that comprise the indentation.
206 pub len: u32,
207 /// The kind of whitespace used for indentation.
208 pub kind: IndentKind,
209}
210
211/// A whitespace character that's used for indentation.
212#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
213pub enum IndentKind {
214 /// An ASCII space character.
215 #[default]
216 Space,
217 /// An ASCII tab character.
218 Tab,
219}
220
221/// The shape of a selection cursor.
222#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
223pub enum CursorShape {
224 /// A vertical bar
225 #[default]
226 Bar,
227 /// A block that surrounds the following character
228 Block,
229 /// An underline that runs along the following character
230 Underline,
231 /// A box drawn around the following character
232 Hollow,
233}
234
235impl From<settings::CursorShape> for CursorShape {
236 fn from(shape: settings::CursorShape) -> Self {
237 match shape {
238 settings::CursorShape::Bar => CursorShape::Bar,
239 settings::CursorShape::Block => CursorShape::Block,
240 settings::CursorShape::Underline => CursorShape::Underline,
241 settings::CursorShape::Hollow => CursorShape::Hollow,
242 }
243 }
244}
245
246#[derive(Clone, Debug)]
247struct SelectionSet {
248 line_mode: bool,
249 cursor_shape: CursorShape,
250 selections: Arc<[Selection<Anchor>]>,
251 lamport_timestamp: clock::Lamport,
252}
253
254/// A diagnostic associated with a certain range of a buffer.
255#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
256pub struct Diagnostic {
257 /// The name of the service that produced this diagnostic.
258 pub source: Option<String>,
259 /// The ID provided by the dynamic registration that produced this diagnostic.
260 pub registration_id: Option<SharedString>,
261 /// A machine-readable code that identifies this diagnostic.
262 pub code: Option<NumberOrString>,
    /// A URI with more information about this diagnostic's code.
    pub code_description: Option<lsp::Uri>,
264 /// Whether this diagnostic is a hint, warning, or error.
265 pub severity: DiagnosticSeverity,
266 /// The human-readable message associated with this diagnostic.
267 pub message: String,
    /// The human-readable message, in Markdown format, if any.
269 pub markdown: Option<String>,
270 /// An id that identifies the group to which this diagnostic belongs.
271 ///
272 /// When a language server produces a diagnostic with
273 /// one or more associated diagnostics, those diagnostics are all
274 /// assigned a single group ID.
275 pub group_id: usize,
276 /// Whether this diagnostic is the primary diagnostic for its group.
277 ///
278 /// In a given group, the primary diagnostic is the top-level diagnostic
279 /// returned by the language server. The non-primary diagnostics are the
280 /// associated diagnostics.
281 pub is_primary: bool,
282 /// Whether this diagnostic is considered to originate from an analysis of
283 /// files on disk, as opposed to any unsaved buffer contents. This is a
284 /// property of a given diagnostic source, and is configured for a given
    /// language server via its
    /// [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method.
287 pub is_disk_based: bool,
288 /// Whether this diagnostic marks unnecessary code.
289 pub is_unnecessary: bool,
    /// Coarse categorization of diagnostic groups based on their source.
291 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic, passed back
    /// to the server when code actions are requested for this diagnostic.
293 pub data: Option<Value>,
294 /// Whether to underline the corresponding text range in the editor.
295 pub underline: bool,
296}
297
298#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
299pub enum DiagnosticSourceKind {
300 Pulled,
301 Pushed,
302 Other,
303}
304
305/// An operation used to synchronize this buffer with its other replicas.
306#[derive(Clone, Debug, PartialEq)]
307pub enum Operation {
308 /// A text operation.
309 Buffer(text::Operation),
310
311 /// An update to the buffer's diagnostics.
312 UpdateDiagnostics {
313 /// The id of the language server that produced the new diagnostics.
314 server_id: LanguageServerId,
315 /// The diagnostics.
316 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
317 /// The buffer's lamport timestamp.
318 lamport_timestamp: clock::Lamport,
319 },
320
321 /// An update to the most recent selections in this buffer.
322 UpdateSelections {
323 /// The selections.
324 selections: Arc<[Selection<Anchor>]>,
325 /// The buffer's lamport timestamp.
326 lamport_timestamp: clock::Lamport,
327 /// Whether the selections are in 'line mode'.
328 line_mode: bool,
329 /// The [`CursorShape`] associated with these selections.
330 cursor_shape: CursorShape,
331 },
332
333 /// An update to the characters that should trigger autocompletion
334 /// for this buffer.
335 UpdateCompletionTriggers {
336 /// The characters that trigger autocompletion.
337 triggers: Vec<String>,
338 /// The buffer's lamport timestamp.
339 lamport_timestamp: clock::Lamport,
340 /// The language server ID.
341 server_id: LanguageServerId,
342 },
343
344 /// An update to the line ending type of this buffer.
345 UpdateLineEnding {
346 /// The line ending type.
347 line_ending: LineEnding,
348 /// The buffer's lamport timestamp.
349 lamport_timestamp: clock::Lamport,
350 },
351}
352
353/// An event that occurs in a buffer.
354#[derive(Clone, Debug, PartialEq)]
355pub enum BufferEvent {
356 /// The buffer was changed in a way that must be
357 /// propagated to its other replicas.
358 Operation {
359 operation: Operation,
360 is_local: bool,
361 },
362 /// The buffer was edited.
363 Edited,
364 /// The buffer's `dirty` bit changed.
365 DirtyChanged,
366 /// The buffer was saved.
367 Saved,
368 /// The buffer's file was changed on disk.
369 FileHandleChanged,
370 /// The buffer was reloaded.
371 Reloaded,
    /// The buffer needs to be reloaded.
373 ReloadNeeded,
374 /// The buffer's language was changed.
375 /// The boolean indicates whether this buffer did not have a language before, but does now.
376 LanguageChanged(bool),
377 /// The buffer's syntax trees were updated.
378 Reparsed,
379 /// The buffer's diagnostics were updated.
380 DiagnosticsUpdated,
381 /// The buffer gained or lost editing capabilities.
382 CapabilityChanged,
383}
384
385/// The file associated with a buffer.
386pub trait File: Send + Sync + Any {
387 /// Returns the [`LocalFile`] associated with this file, if the
388 /// file is local.
389 fn as_local(&self) -> Option<&dyn LocalFile>;
390
391 /// Returns whether this file is local.
392 fn is_local(&self) -> bool {
393 self.as_local().is_some()
394 }
395
396 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
397 /// only available in some states, such as modification time.
398 fn disk_state(&self) -> DiskState;
399
400 /// Returns the path of this file relative to the worktree's root directory.
401 fn path(&self) -> &Arc<RelPath>;
402
403 /// Returns the path of this file relative to the worktree's parent directory (this means it
404 /// includes the name of the worktree's root folder).
405 fn full_path(&self, cx: &App) -> PathBuf;
406
407 /// Returns the path style of this file.
408 fn path_style(&self, cx: &App) -> PathStyle;
409
410 /// Returns the last component of this handle's absolute path. If this handle refers to the root
411 /// of its worktree, then this method will return the name of the worktree itself.
412 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
413
414 /// Returns the id of the worktree to which this file belongs.
415 ///
416 /// This is needed for looking up project-specific settings.
417 fn worktree_id(&self, cx: &App) -> WorktreeId;
418
419 /// Converts this file into a protobuf message.
420 fn to_proto(&self, cx: &App) -> rpc::proto::File;
421
    /// Returns whether Zed considers this to be a private file.
423 fn is_private(&self) -> bool;
424
425 fn can_open(&self) -> bool {
426 !self.is_local()
427 }
428}
429
430/// The file's storage status - whether it's stored (`Present`), and if so when it was last
431/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
432/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
433/// indicator for new files.
434#[derive(Copy, Clone, Debug, PartialEq)]
435pub enum DiskState {
436 /// File created in Zed that has not been saved.
437 New,
438 /// File present on the filesystem.
439 Present { mtime: MTime },
440 /// Deleted file that was previously present.
441 Deleted,
    /// An old version of a file that was previously present,
    /// usually from a version control system (e.g. a Git blob).
444 Historic { was_deleted: bool },
445}
446
447impl DiskState {
448 /// Returns the file's last known modification time on disk.
449 pub fn mtime(self) -> Option<MTime> {
450 match self {
451 DiskState::New => None,
452 DiskState::Present { mtime } => Some(mtime),
453 DiskState::Deleted => None,
454 DiskState::Historic { .. } => None,
455 }
456 }
457
458 pub fn exists(&self) -> bool {
459 match self {
460 DiskState::New => false,
461 DiskState::Present { .. } => true,
462 DiskState::Deleted => false,
463 DiskState::Historic { .. } => false,
464 }
465 }
466
467 /// Returns true if this state represents a deleted file.
468 pub fn is_deleted(&self) -> bool {
469 match self {
470 DiskState::Deleted => true,
471 DiskState::Historic { was_deleted } => *was_deleted,
472 _ => false,
473 }
474 }
475}
476
477/// The file associated with a buffer, in the case where the file is on the local disk.
478pub trait LocalFile: File {
    /// Returns the absolute path of this file.
480 fn abs_path(&self, cx: &App) -> PathBuf;
481
482 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
483 fn load(&self, cx: &App) -> Task<Result<String>>;
484
485 /// Loads the file's contents from disk.
486 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
487}
488
489/// The auto-indent behavior associated with an editing operation.
490/// For some editing operations, each affected line of text has its
491/// indentation recomputed. For other operations, the entire block
492/// of edited text is adjusted uniformly.
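///
/// A minimal sketch of passing a mode to an edit. The call shape mirrors
/// `Buffer::edit` as used elsewhere in this module; the range and inserted
/// text are purely illustrative:
/// ```ignore
/// buffer.edit(
///     [(0..0, "if x {\n    y();\n}\n")],
///     Some(AutoindentMode::EachLine),
///     cx,
/// );
/// ```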
493#[derive(Clone, Debug)]
494pub enum AutoindentMode {
495 /// Indent each line of inserted text.
496 EachLine,
497 /// Apply the same indentation adjustment to all of the lines
498 /// in a given insertion.
499 Block {
500 /// The original indentation column of the first line of each
501 /// insertion, if it has been copied.
502 ///
503 /// Knowing this makes it possible to preserve the relative indentation
504 /// of every line in the insertion from when it was copied.
505 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion will have its indentation adjusted by `b - a`.
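        ///
        /// For example (illustrative numbers, not taken from the implementation):
        /// if the first line of a copied block was originally at column 4
        /// (`a = 4`) and auto-indent places it at column 8 (`b = 8`), every
        /// line of that insertion is shifted right by `b - a = 4` columns.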
509 original_indent_columns: Vec<Option<u32>>,
510 },
511}
512
513#[derive(Clone)]
514struct AutoindentRequest {
515 before_edit: BufferSnapshot,
516 entries: Vec<AutoindentRequestEntry>,
517 is_block_mode: bool,
518 ignore_empty_lines: bool,
519}
520
521#[derive(Debug, Clone)]
522struct AutoindentRequestEntry {
523 /// A range of the buffer whose indentation should be adjusted.
524 range: Range<Anchor>,
525 /// The row of the edit start in the buffer before the edit was applied.
526 /// This is stored here because the anchor in range is created after
527 /// the edit, so it cannot be used with the before_edit snapshot.
528 old_row: Option<u32>,
529 indent_size: IndentSize,
530 original_indent_column: Option<u32>,
531}
532
533#[derive(Debug)]
534struct IndentSuggestion {
535 basis_row: u32,
536 delta: Ordering,
537 within_error: bool,
538}
539
540struct BufferChunkHighlights<'a> {
541 captures: SyntaxMapCaptures<'a>,
542 next_capture: Option<SyntaxMapCapture<'a>>,
543 stack: Vec<(usize, HighlightId)>,
544 highlight_maps: Vec<HighlightMap>,
545}
546
547/// An iterator that yields chunks of a buffer's text, along with their
548/// syntax highlights and diagnostic status.
549pub struct BufferChunks<'a> {
550 buffer_snapshot: Option<&'a BufferSnapshot>,
551 range: Range<usize>,
552 chunks: text::Chunks<'a>,
553 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
554 error_depth: usize,
555 warning_depth: usize,
556 information_depth: usize,
557 hint_depth: usize,
558 unnecessary_depth: usize,
559 underline: bool,
560 highlights: Option<BufferChunkHighlights<'a>>,
561}
562
563/// A chunk of a buffer's text, along with its syntax highlight and
564/// diagnostic status.
565#[derive(Clone, Debug, Default)]
566pub struct Chunk<'a> {
567 /// The text of the chunk.
568 pub text: &'a str,
569 /// The syntax highlighting style of the chunk.
570 pub syntax_highlight_id: Option<HighlightId>,
571 /// The highlight style that has been applied to this chunk in
572 /// the editor.
573 pub highlight_style: Option<HighlightStyle>,
574 /// The severity of diagnostic associated with this chunk, if any.
575 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
    pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
    pub chars: u128,
580 /// Whether this chunk of text is marked as unnecessary.
581 pub is_unnecessary: bool,
582 /// Whether this chunk of text was originally a tab character.
583 pub is_tab: bool,
584 /// Whether this chunk of text was originally an inlay.
585 pub is_inlay: bool,
586 /// Whether to underline the corresponding text range in the editor.
587 pub underline: bool,
588}
589
590/// A set of edits to a given version of a buffer, computed asynchronously.
591#[derive(Debug, Clone)]
592pub struct Diff {
593 pub base_version: clock::Global,
594 pub line_ending: LineEnding,
595 pub edits: Vec<(Range<usize>, Arc<str>)>,
596}
597
598#[derive(Debug, Clone, Copy)]
599pub(crate) struct DiagnosticEndpoint {
600 offset: usize,
601 is_start: bool,
602 underline: bool,
603 severity: DiagnosticSeverity,
604 is_unnecessary: bool,
605}
606
607/// A class of characters, used for characterizing a run of text.
608#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
609pub enum CharKind {
610 /// Whitespace.
611 Whitespace,
612 /// Punctuation.
613 Punctuation,
614 /// Word.
615 Word,
616}
617
618/// Context for character classification within a specific scope.
619#[derive(Copy, Clone, Eq, PartialEq, Debug)]
620pub enum CharScopeContext {
621 /// Character classification for completion queries.
622 ///
623 /// This context treats certain characters as word constituents that would
624 /// normally be considered punctuation, such as '-' in Tailwind classes
625 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
626 Completion,
627 /// Character classification for linked edits.
628 ///
629 /// This context handles characters that should be treated as part of
630 /// identifiers during linked editing operations, such as '.' in JSX
631 /// component names like `<Animated.View>`.
632 LinkedEdit,
633}
634
/// A runnable is a set of data about a region that could be resolved into a task.
636pub struct Runnable {
637 pub tags: SmallVec<[RunnableTag; 1]>,
638 pub language: Arc<Language>,
639 pub buffer: BufferId,
640}
641
642#[derive(Default, Clone, Debug)]
643pub struct HighlightedText {
644 pub text: SharedString,
645 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
646}
647
648#[derive(Default, Debug)]
649struct HighlightedTextBuilder {
650 pub text: String,
651 highlights: Vec<(Range<usize>, HighlightStyle)>,
652}
653
654impl HighlightedText {
655 pub fn from_buffer_range<T: ToOffset>(
656 range: Range<T>,
657 snapshot: &text::BufferSnapshot,
658 syntax_snapshot: &SyntaxSnapshot,
659 override_style: Option<HighlightStyle>,
660 syntax_theme: &SyntaxTheme,
661 ) -> Self {
662 let mut highlighted_text = HighlightedTextBuilder::default();
663 highlighted_text.add_text_from_buffer_range(
664 range,
665 snapshot,
666 syntax_snapshot,
667 override_style,
668 syntax_theme,
669 );
670 highlighted_text.build()
671 }
672
673 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
674 gpui::StyledText::new(self.text.clone())
675 .with_default_highlights(default_style, self.highlights.iter().cloned())
676 }
677
    /// Returns the first line with its leading whitespace trimmed (unless a
    /// highlight begins within that whitespace), along with a boolean
    /// indicating whether more lines follow.
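    ///
    /// A rough usage sketch (names illustrative, not taken from a call site):
    /// ```ignore
    /// let (preview, has_more_lines) = highlighted_text.first_line_preview();
    /// // `preview.text` holds only the (possibly trimmed) first line;
    /// // `has_more_lines` is true when the original text contained a newline.
    /// ```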
680 pub fn first_line_preview(self) -> (Self, bool) {
681 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
682 let first_line = &self.text[..newline_ix];
683
684 // Trim leading whitespace, unless an edit starts prior to it.
685 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
686 if let Some((first_highlight_range, _)) = self.highlights.first() {
687 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
688 }
689
690 let preview_text = &first_line[preview_start_ix..];
691 let preview_highlights = self
692 .highlights
693 .into_iter()
694 .skip_while(|(range, _)| range.end <= preview_start_ix)
695 .take_while(|(range, _)| range.start < newline_ix)
696 .filter_map(|(mut range, highlight)| {
697 range.start = range.start.saturating_sub(preview_start_ix);
698 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
699 if range.is_empty() {
700 None
701 } else {
702 Some((range, highlight))
703 }
704 });
705
706 let preview = Self {
707 text: SharedString::new(preview_text),
708 highlights: preview_highlights.collect(),
709 };
710
711 (preview, self.text.len() > newline_ix)
712 }
713}
714
715impl HighlightedTextBuilder {
716 pub fn build(self) -> HighlightedText {
717 HighlightedText {
718 text: self.text.into(),
719 highlights: self.highlights,
720 }
721 }
722
723 pub fn add_text_from_buffer_range<T: ToOffset>(
724 &mut self,
725 range: Range<T>,
726 snapshot: &text::BufferSnapshot,
727 syntax_snapshot: &SyntaxSnapshot,
728 override_style: Option<HighlightStyle>,
729 syntax_theme: &SyntaxTheme,
730 ) {
731 let range = range.to_offset(snapshot);
732 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
733 let start = self.text.len();
734 self.text.push_str(chunk.text);
735 let end = self.text.len();
736
737 if let Some(highlight_style) = chunk
738 .syntax_highlight_id
739 .and_then(|id| id.style(syntax_theme))
740 {
741 let highlight_style = override_style.map_or(highlight_style, |override_style| {
742 highlight_style.highlight(override_style)
743 });
744 self.highlights.push((start..end, highlight_style));
745 } else if let Some(override_style) = override_style {
746 self.highlights.push((start..end, override_style));
747 }
748 }
749 }
750
751 fn highlighted_chunks<'a>(
752 range: Range<usize>,
753 snapshot: &'a text::BufferSnapshot,
754 syntax_snapshot: &'a SyntaxSnapshot,
755 ) -> BufferChunks<'a> {
756 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
757 grammar
758 .highlights_config
759 .as_ref()
760 .map(|config| &config.query)
761 });
762
763 let highlight_maps = captures
764 .grammars()
765 .iter()
766 .map(|grammar| grammar.highlight_map())
767 .collect();
768
769 BufferChunks::new(
770 snapshot.as_rope(),
771 range,
772 Some((captures, highlight_maps)),
773 false,
774 None,
775 )
776 }
777}
778
779#[derive(Clone)]
780pub struct EditPreview {
781 old_snapshot: text::BufferSnapshot,
782 applied_edits_snapshot: text::BufferSnapshot,
783 syntax_snapshot: SyntaxSnapshot,
784}
785
786impl EditPreview {
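    /// Renders the previewed edits as a unified diff against the old snapshot,
    /// including a few lines of surrounding context and, when the buffer has a
    /// file path, a `--- a/...` / `+++ b/...` header. Returns `None` if `edits`
    /// is empty.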
787 pub fn as_unified_diff(
788 &self,
789 file: Option<&Arc<dyn File>>,
790 edits: &[(Range<Anchor>, impl AsRef<str>)],
791 ) -> Option<String> {
792 let (first, _) = edits.first()?;
793 let (last, _) = edits.last()?;
794
795 let start = first.start.to_point(&self.old_snapshot);
796 let old_end = last.end.to_point(&self.old_snapshot);
797 let new_end = last
798 .end
799 .bias_right(&self.old_snapshot)
800 .to_point(&self.applied_edits_snapshot);
801
802 let start = Point::new(start.row.saturating_sub(3), 0);
803 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
804 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
805
806 let diff_body = unified_diff_with_offsets(
807 &self
808 .old_snapshot
809 .text_for_range(start..old_end)
810 .collect::<String>(),
811 &self
812 .applied_edits_snapshot
813 .text_for_range(start..new_end)
814 .collect::<String>(),
815 start.row,
816 start.row,
817 );
818
819 let path = file.map(|f| f.path().as_unix_str());
820 let header = match path {
821 Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
822 None => String::new(),
823 };
824
825 Some(format!("{}{}", header, diff_body))
826 }
827
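    /// Builds a [`HighlightedText`] covering the edited region, highlighting
    /// inserted text with the theme's "created" background and, when
    /// `include_deletions` is true, the replaced text with the "deleted"
    /// background.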
828 pub fn highlight_edits(
829 &self,
830 current_snapshot: &BufferSnapshot,
831 edits: &[(Range<Anchor>, impl AsRef<str>)],
832 include_deletions: bool,
833 cx: &App,
834 ) -> HighlightedText {
835 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
836 return HighlightedText::default();
837 };
838
839 let mut highlighted_text = HighlightedTextBuilder::default();
840
841 let visible_range_in_preview_snapshot =
842 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
843 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
844
845 let insertion_highlight_style = HighlightStyle {
846 background_color: Some(cx.theme().status().created_background),
847 ..Default::default()
848 };
849 let deletion_highlight_style = HighlightStyle {
850 background_color: Some(cx.theme().status().deleted_background),
851 ..Default::default()
852 };
853 let syntax_theme = cx.theme().syntax();
854
855 for (range, edit_text) in edits {
856 let edit_new_end_in_preview_snapshot = range
857 .end
858 .bias_right(&self.old_snapshot)
859 .to_offset(&self.applied_edits_snapshot);
860 let edit_start_in_preview_snapshot =
861 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
862
863 let unchanged_range_in_preview_snapshot =
864 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
865 if !unchanged_range_in_preview_snapshot.is_empty() {
866 highlighted_text.add_text_from_buffer_range(
867 unchanged_range_in_preview_snapshot,
868 &self.applied_edits_snapshot,
869 &self.syntax_snapshot,
870 None,
871 syntax_theme,
872 );
873 }
874
875 let range_in_current_snapshot = range.to_offset(current_snapshot);
876 if include_deletions && !range_in_current_snapshot.is_empty() {
877 highlighted_text.add_text_from_buffer_range(
878 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
881 Some(deletion_highlight_style),
882 syntax_theme,
883 );
884 }
885
886 if !edit_text.as_ref().is_empty() {
887 highlighted_text.add_text_from_buffer_range(
888 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
889 &self.applied_edits_snapshot,
890 &self.syntax_snapshot,
891 Some(insertion_highlight_style),
892 syntax_theme,
893 );
894 }
895
896 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
897 }
898
899 highlighted_text.add_text_from_buffer_range(
900 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
901 &self.applied_edits_snapshot,
902 &self.syntax_snapshot,
903 None,
904 syntax_theme,
905 );
906
907 highlighted_text.build()
908 }
909
910 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
911 cx.new(|cx| {
912 let mut buffer = Buffer::local_normalized(
913 self.applied_edits_snapshot.as_rope().clone(),
914 self.applied_edits_snapshot.line_ending(),
915 cx,
916 );
917 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
918 buffer
919 })
920 }
921
922 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
923 let (first, _) = edits.first()?;
924 let (last, _) = edits.last()?;
925
926 let start = first
927 .start
928 .bias_left(&self.old_snapshot)
929 .to_point(&self.applied_edits_snapshot);
930 let end = last
931 .end
932 .bias_right(&self.old_snapshot)
933 .to_point(&self.applied_edits_snapshot);
934
935 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
936 let range = Point::new(start.row, 0)
937 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
938
939 Some(range)
940 }
941}
942
943#[derive(Clone, Debug, PartialEq, Eq)]
944pub struct BracketMatch<T> {
945 pub open_range: Range<T>,
946 pub close_range: Range<T>,
947 pub newline_only: bool,
948 pub syntax_layer_depth: usize,
949 pub color_index: Option<usize>,
950}
951
952impl<T> BracketMatch<T> {
953 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
954 (self.open_range, self.close_range)
955 }
956}
957
958impl Buffer {
959 /// Create a new buffer with the given base text.
960 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
961 Self::build(
962 TextBuffer::new(
963 ReplicaId::LOCAL,
964 cx.entity_id().as_non_zero_u64().into(),
965 base_text.into(),
966 ),
967 None,
968 Capability::ReadWrite,
969 )
970 }
971
972 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
973 pub fn local_normalized(
974 base_text_normalized: Rope,
975 line_ending: LineEnding,
976 cx: &Context<Self>,
977 ) -> Self {
978 Self::build(
979 TextBuffer::new_normalized(
980 ReplicaId::LOCAL,
981 cx.entity_id().as_non_zero_u64().into(),
982 line_ending,
983 base_text_normalized,
984 ),
985 None,
986 Capability::ReadWrite,
987 )
988 }
989
990 /// Create a new buffer that is a replica of a remote buffer.
991 pub fn remote(
992 remote_id: BufferId,
993 replica_id: ReplicaId,
994 capability: Capability,
995 base_text: impl Into<String>,
996 ) -> Self {
997 Self::build(
998 TextBuffer::new(replica_id, remote_id, base_text.into()),
999 None,
1000 capability,
1001 )
1002 }
1003
1004 /// Create a new buffer that is a replica of a remote buffer, populating its
1005 /// state from the given protobuf message.
1006 pub fn from_proto(
1007 replica_id: ReplicaId,
1008 capability: Capability,
1009 message: proto::BufferState,
1010 file: Option<Arc<dyn File>>,
1011 ) -> Result<Self> {
1012 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
1013 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
1014 let mut this = Self::build(buffer, file, capability);
1015 this.text.set_line_ending(proto::deserialize_line_ending(
1016 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
1017 ));
1018 this.saved_version = proto::deserialize_version(&message.saved_version);
1019 this.saved_mtime = message.saved_mtime.map(|time| time.into());
1020 Ok(this)
1021 }
1022
1023 /// Serialize the buffer's state to a protobuf message.
1024 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
1025 proto::BufferState {
1026 id: self.remote_id().into(),
1027 file: self.file.as_ref().map(|f| f.to_proto(cx)),
1028 base_text: self.base_text().to_string(),
1029 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1030 saved_version: proto::serialize_version(&self.saved_version),
1031 saved_mtime: self.saved_mtime.map(|time| time.into()),
1032 }
1033 }
1034
1035 /// Serialize as protobufs all of the changes to the buffer since the given version.
1036 pub fn serialize_ops(
1037 &self,
1038 since: Option<clock::Global>,
1039 cx: &App,
1040 ) -> Task<Vec<proto::Operation>> {
1041 let mut operations = Vec::new();
1042 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1043
1044 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1045 proto::serialize_operation(&Operation::UpdateSelections {
1046 selections: set.selections.clone(),
1047 lamport_timestamp: set.lamport_timestamp,
1048 line_mode: set.line_mode,
1049 cursor_shape: set.cursor_shape,
1050 })
1051 }));
1052
1053 for (server_id, diagnostics) in &self.diagnostics {
1054 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1055 lamport_timestamp: self.diagnostics_timestamp,
1056 server_id: *server_id,
1057 diagnostics: diagnostics.iter().cloned().collect(),
1058 }));
1059 }
1060
1061 for (server_id, completions) in &self.completion_triggers_per_language_server {
1062 operations.push(proto::serialize_operation(
1063 &Operation::UpdateCompletionTriggers {
1064 triggers: completions.iter().cloned().collect(),
1065 lamport_timestamp: self.completion_triggers_timestamp,
1066 server_id: *server_id,
1067 },
1068 ));
1069 }
1070
1071 let text_operations = self.text.operations().clone();
1072 cx.background_spawn(async move {
1073 let since = since.unwrap_or_default();
1074 operations.extend(
1075 text_operations
1076 .iter()
1077 .filter(|(_, op)| !since.observed(op.timestamp()))
1078 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1079 );
1080 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1081 operations
1082 })
1083 }
1084
1085 /// Assign a language to the buffer, returning the buffer.
1086 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1087 self.set_language_async(Some(language), cx);
1088 self
1089 }
1090
1091 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
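    ///
    /// A minimal sketch (assuming `rust_language: Arc<Language>` is available):
    /// ```ignore
    /// let buffer = cx.new(|cx| {
    ///     Buffer::local("fn main() {}", cx).with_language(rust_language, cx)
    /// });
    /// ```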
1092 #[ztracing::instrument(skip_all, fields(lang = language.config.name.0.as_str()))]
1093 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1094 self.set_language(Some(language), cx);
1095 self
1096 }
1097
1098 /// Returns the [`Capability`] of this buffer.
1099 pub fn capability(&self) -> Capability {
1100 self.capability
1101 }
1102
1103 /// Whether this buffer can only be read.
1104 pub fn read_only(&self) -> bool {
1105 !self.capability.editable()
1106 }
1107
1108 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1109 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1110 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1111 let snapshot = buffer.snapshot();
1112 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1113 let tree_sitter_data = TreeSitterData::new(snapshot);
1114 Self {
1115 saved_mtime,
1116 tree_sitter_data: Arc::new(tree_sitter_data),
1117 saved_version: buffer.version(),
1118 preview_version: buffer.version(),
1119 reload_task: None,
1120 transaction_depth: 0,
1121 was_dirty_before_starting_transaction: None,
1122 has_unsaved_edits: Cell::new((buffer.version(), false)),
1123 text: buffer,
1124 branch_state: None,
1125 file,
1126 capability,
1127 syntax_map,
1128 reparse: None,
1129 non_text_state_update_count: 0,
1130 sync_parse_timeout: if cfg!(any(test, feature = "test-support")) {
1131 Some(Duration::from_millis(10))
1132 } else {
1133 Some(Duration::from_millis(1))
1134 },
1135 parse_status: watch::channel(ParseStatus::Idle),
1136 autoindent_requests: Default::default(),
1137 wait_for_autoindent_txs: Default::default(),
1138 pending_autoindent: Default::default(),
1139 language: None,
1140 remote_selections: Default::default(),
1141 diagnostics: Default::default(),
1142 diagnostics_timestamp: Lamport::MIN,
1143 completion_triggers: Default::default(),
1144 completion_triggers_per_language_server: Default::default(),
1145 completion_triggers_timestamp: Lamport::MIN,
1146 deferred_ops: OperationQueue::new(),
1147 has_conflict: false,
1148 change_bits: Default::default(),
1149 modeline: None,
1150 _subscriptions: Vec::new(),
1151 encoding: encoding_rs::UTF_8,
1152 has_bom: false,
1153 }
1154 }
1155
1156 pub fn build_snapshot(
1157 text: Rope,
1158 language: Option<Arc<Language>>,
1159 language_registry: Option<Arc<LanguageRegistry>>,
1160 modeline: Option<Arc<ModelineSettings>>,
1161 cx: &mut App,
1162 ) -> impl Future<Output = BufferSnapshot> + use<> {
1163 let entity_id = cx.reserve_entity::<Self>().entity_id();
1164 let buffer_id = entity_id.as_non_zero_u64().into();
1165 async move {
1166 let text =
1167 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1168 .snapshot();
1169 let mut syntax = SyntaxMap::new(&text).snapshot();
1170 if let Some(language) = language.clone() {
1171 let language_registry = language_registry.clone();
1172 syntax.reparse(&text, language_registry, language);
1173 }
1174 let tree_sitter_data = TreeSitterData::new(text.clone());
1175 BufferSnapshot {
1176 text,
1177 syntax,
1178 file: None,
1179 diagnostics: Default::default(),
1180 remote_selections: Default::default(),
1181 tree_sitter_data: Arc::new(tree_sitter_data),
1182 language,
1183 non_text_state_update_count: 0,
1184 capability: Capability::ReadOnly,
1185 modeline,
1186 }
1187 }
1188 }
1189
1190 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1191 let entity_id = cx.reserve_entity::<Self>().entity_id();
1192 let buffer_id = entity_id.as_non_zero_u64().into();
1193 let text = TextBuffer::new_normalized(
1194 ReplicaId::LOCAL,
1195 buffer_id,
1196 Default::default(),
1197 Rope::new(),
1198 )
1199 .snapshot();
1200 let syntax = SyntaxMap::new(&text).snapshot();
1201 let tree_sitter_data = TreeSitterData::new(text.clone());
1202 BufferSnapshot {
1203 text,
1204 syntax,
1205 tree_sitter_data: Arc::new(tree_sitter_data),
1206 file: None,
1207 diagnostics: Default::default(),
1208 remote_selections: Default::default(),
1209 language: None,
1210 non_text_state_update_count: 0,
1211 capability: Capability::ReadOnly,
1212 modeline: None,
1213 }
1214 }
1215
1216 #[cfg(any(test, feature = "test-support"))]
1217 pub fn build_snapshot_sync(
1218 text: Rope,
1219 language: Option<Arc<Language>>,
1220 language_registry: Option<Arc<LanguageRegistry>>,
1221 cx: &mut App,
1222 ) -> BufferSnapshot {
1223 let entity_id = cx.reserve_entity::<Self>().entity_id();
1224 let buffer_id = entity_id.as_non_zero_u64().into();
1225 let text =
1226 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1227 .snapshot();
1228 let mut syntax = SyntaxMap::new(&text).snapshot();
1229 if let Some(language) = language.clone() {
1230 syntax.reparse(&text, language_registry, language);
1231 }
1232 let tree_sitter_data = TreeSitterData::new(text.clone());
1233 BufferSnapshot {
1234 text,
1235 syntax,
1236 tree_sitter_data: Arc::new(tree_sitter_data),
1237 file: None,
1238 diagnostics: Default::default(),
1239 remote_selections: Default::default(),
1240 language,
1241 non_text_state_update_count: 0,
1242 capability: Capability::ReadOnly,
1243 modeline: None,
1244 }
1245 }
1246
1247 /// Retrieve a snapshot of the buffer's current state. This is computationally
1248 /// cheap, and allows reading from the buffer on a background thread.
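    ///
    /// A minimal sketch (assuming `buffer: &Buffer` and a `Context<Buffer>`
    /// named `cx` are in scope):
    /// ```ignore
    /// let snapshot = buffer.snapshot();
    /// cx.background_spawn(async move {
    ///     // Read from the snapshot without holding the buffer entity.
    ///     let _max_point = snapshot.text.max_point();
    /// })
    /// .detach();
    /// ```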
1249 pub fn snapshot(&self) -> BufferSnapshot {
1250 let text = self.text.snapshot();
1251 let mut syntax_map = self.syntax_map.lock();
1252 syntax_map.interpolate(&text);
1253 let syntax = syntax_map.snapshot();
1254
1255 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1256 Arc::new(TreeSitterData::new(text.clone()))
1257 } else {
1258 self.tree_sitter_data.clone()
1259 };
1260
1261 BufferSnapshot {
1262 text,
1263 syntax,
1264 tree_sitter_data,
1265 file: self.file.clone(),
1266 remote_selections: self.remote_selections.clone(),
1267 diagnostics: self.diagnostics.clone(),
1268 language: self.language.clone(),
1269 non_text_state_update_count: self.non_text_state_update_count,
1270 capability: self.capability,
1271 modeline: self.modeline.clone(),
1272 }
1273 }
1274
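    /// Creates a new buffer entity that is a branch of this buffer: it starts
    /// from this buffer's text and language, and its changes can later be
    /// merged back via [`Buffer::merge_into_base`].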
1275 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1276 let this = cx.entity();
1277 cx.new(|cx| {
1278 let mut branch = Self {
1279 branch_state: Some(BufferBranchState {
1280 base_buffer: this.clone(),
1281 merged_operations: Default::default(),
1282 }),
1283 language: self.language.clone(),
1284 has_conflict: self.has_conflict,
1285 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1286 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1287 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1288 };
1289 if let Some(language_registry) = self.language_registry() {
1290 branch.set_language_registry(language_registry);
1291 }
1292
1293 // Reparse the branch buffer so that we get syntax highlighting immediately.
1294 branch.reparse(cx, true);
1295
1296 branch
1297 })
1298 }
1299
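    /// Computes an [`EditPreview`] for `edits` by applying them to a branch of
    /// this buffer's text on a background thread and reparsing, so that the
    /// preview can be syntax highlighted.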
1300 pub fn preview_edits(
1301 &self,
1302 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1303 cx: &App,
1304 ) -> Task<EditPreview> {
1305 let registry = self.language_registry();
1306 let language = self.language().cloned();
1307 let old_snapshot = self.text.snapshot();
1308 let mut branch_buffer = self.text.branch();
1309 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1310 cx.background_spawn(async move {
1311 if !edits.is_empty() {
1312 if let Some(language) = language.clone() {
1313 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1314 }
1315
1316 branch_buffer.edit(edits.iter().cloned());
1317 let snapshot = branch_buffer.snapshot();
1318 syntax_snapshot.interpolate(&snapshot);
1319
1320 if let Some(language) = language {
1321 syntax_snapshot.reparse(&snapshot, registry, language);
1322 }
1323 }
1324 EditPreview {
1325 old_snapshot,
1326 applied_edits_snapshot: branch_buffer.snapshot(),
1327 syntax_snapshot,
1328 }
1329 })
1330 }
1331
1332 /// Applies all of the changes in this buffer that intersect any of the
1333 /// given `ranges` to its base buffer.
1334 ///
1335 /// If `ranges` is empty, then all changes will be applied. This buffer must
1336 /// be a branch buffer to call this method.
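    ///
    /// A rough sketch (assuming `branch` is an `Entity<Buffer>` created via
    /// [`Buffer::branch`]):
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // An empty range list applies every change back onto the base buffer.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```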
1337 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1338 let Some(base_buffer) = self.base_buffer() else {
1339 debug_panic!("not a branch buffer");
1340 return;
1341 };
1342
1343 let mut ranges = if ranges.is_empty() {
1344 &[0..usize::MAX]
1345 } else {
1346 ranges.as_slice()
1347 }
1348 .iter()
1349 .peekable();
1350
1351 let mut edits = Vec::new();
1352 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1353 let mut is_included = false;
1354 while let Some(range) = ranges.peek() {
1355 if range.end < edit.new.start {
1356 ranges.next().unwrap();
1357 } else {
1358 if range.start <= edit.new.end {
1359 is_included = true;
1360 }
1361 break;
1362 }
1363 }
1364
1365 if is_included {
1366 edits.push((
1367 edit.old.clone(),
1368 self.text_for_range(edit.new.clone()).collect::<String>(),
1369 ));
1370 }
1371 }
1372
1373 let operation = base_buffer.update(cx, |base_buffer, cx| {
1374 // cx.emit(BufferEvent::DiffBaseChanged);
1375 base_buffer.edit(edits, None, cx)
1376 });
1377
1378 if let Some(operation) = operation
1379 && let Some(BufferBranchState {
1380 merged_operations, ..
1381 }) = &mut self.branch_state
1382 {
1383 merged_operations.push(operation);
1384 }
1385 }
1386
1387 fn on_base_buffer_event(
1388 &mut self,
1389 _: Entity<Buffer>,
1390 event: &BufferEvent,
1391 cx: &mut Context<Self>,
1392 ) {
1393 let BufferEvent::Operation { operation, .. } = event else {
1394 return;
1395 };
1396 let Some(BufferBranchState {
1397 merged_operations, ..
1398 }) = &mut self.branch_state
1399 else {
1400 return;
1401 };
1402
1403 let mut operation_to_undo = None;
1404 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1405 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1406 {
1407 merged_operations.remove(ix);
1408 operation_to_undo = Some(operation.timestamp);
1409 }
1410
1411 self.apply_ops([operation.clone()], cx);
1412
1413 if let Some(timestamp) = operation_to_undo {
1414 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1415 self.undo_operations(counts, cx);
1416 }
1417 }
1418
1419 #[cfg(test)]
1420 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1421 &self.text
1422 }
1423
1424 /// Retrieve a snapshot of the buffer's raw text, without any
1425 /// language-related state like the syntax tree or diagnostics.
1426 #[ztracing::instrument(skip_all)]
1427 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1428 self.text.snapshot()
1429 }
1430
1431 /// The file associated with the buffer, if any.
1432 pub fn file(&self) -> Option<&Arc<dyn File>> {
1433 self.file.as_ref()
1434 }
1435
1436 /// The version of the buffer that was last saved or reloaded from disk.
1437 pub fn saved_version(&self) -> &clock::Global {
1438 &self.saved_version
1439 }
1440
1441 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1442 pub fn saved_mtime(&self) -> Option<MTime> {
1443 self.saved_mtime
1444 }
1445
1446 /// Returns the character encoding of the buffer's file.
1447 pub fn encoding(&self) -> &'static Encoding {
1448 self.encoding
1449 }
1450
1451 /// Sets the character encoding of the buffer.
1452 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1453 self.encoding = encoding;
1454 }
1455
1456 /// Returns whether the buffer has a Byte Order Mark.
1457 pub fn has_bom(&self) -> bool {
1458 self.has_bom
1459 }
1460
1461 /// Sets whether the buffer has a Byte Order Mark.
1462 pub fn set_has_bom(&mut self, has_bom: bool) {
1463 self.has_bom = has_bom;
1464 }
1465
1466 /// Assign a language to the buffer.
1467 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1468 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1469 }
1470
1471 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1472 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1473 self.set_language_(language, true, cx);
1474 }
1475
1476 #[ztracing::instrument(skip_all)]
1477 fn set_language_(
1478 &mut self,
1479 language: Option<Arc<Language>>,
1480 may_block: bool,
1481 cx: &mut Context<Self>,
1482 ) {
1483 self.non_text_state_update_count += 1;
1484 self.syntax_map.lock().clear(&self.text);
1485 let old_language = std::mem::replace(&mut self.language, language);
1486 self.was_changed();
1487 self.reparse(cx, may_block);
1488 let has_fresh_language =
1489 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1490 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1491 }
1492
1493 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1494 /// other languages if parts of the buffer are written in different languages.
1495 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1496 self.syntax_map
1497 .lock()
1498 .set_language_registry(language_registry);
1499 }
1500
1501 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1502 self.syntax_map.lock().language_registry()
1503 }
1504
1505 /// Assign the line ending type to the buffer.
1506 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1507 self.text.set_line_ending(line_ending);
1508
1509 let lamport_timestamp = self.text.lamport_clock.tick();
1510 self.send_operation(
1511 Operation::UpdateLineEnding {
1512 line_ending,
1513 lamport_timestamp,
1514 },
1515 true,
1516 cx,
1517 );
1518 }
1519
    /// Assign the buffer's [`ModelineSettings`], returning whether the value changed.
1521 pub fn set_modeline(&mut self, modeline: Option<ModelineSettings>) -> bool {
1522 if modeline.as_ref() != self.modeline.as_deref() {
1523 self.modeline = modeline.map(Arc::new);
1524 true
1525 } else {
1526 false
1527 }
1528 }
1529
1530 /// Returns the [`ModelineSettings`].
1531 pub fn modeline(&self) -> Option<&Arc<ModelineSettings>> {
1532 self.modeline.as_ref()
1533 }
1534
1535 /// Assign the buffer a new [`Capability`].
1536 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1537 if self.capability != capability {
1538 self.capability = capability;
1539 cx.emit(BufferEvent::CapabilityChanged)
1540 }
1541 }
1542
1543 /// This method is called to signal that the buffer has been saved.
1544 pub fn did_save(
1545 &mut self,
1546 version: clock::Global,
1547 mtime: Option<MTime>,
1548 cx: &mut Context<Self>,
1549 ) {
1550 self.saved_version = version.clone();
1551 self.has_unsaved_edits.set((version, false));
1552 self.has_conflict = false;
1553 self.saved_mtime = mtime;
1554 self.was_changed();
1555 cx.emit(BufferEvent::Saved);
1556 cx.notify();
1557 }
1558
1559 /// Reloads the contents of the buffer from disk.
1560 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1561 let (tx, rx) = futures::channel::oneshot::channel();
1562 let prev_version = self.text.version();
1563 self.reload_task = Some(cx.spawn(async move |this, cx| {
1564 let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
1565 let file = this.file.as_ref()?.as_local()?;
1566 Some((
1567 file.disk_state().mtime(),
1568 file.load_bytes(cx),
1569 this.encoding,
1570 ))
1571 })?
1572 else {
1573 return Ok(());
1574 };
1575
1576 let bytes = load_bytes_task.await?;
1577 let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
1578 let new_text = cow.into_owned();
1579
1580 let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1581 this.update(cx, |this, cx| {
1582 if this.version() == diff.base_version {
1583 this.finalize_last_transaction();
1584 this.apply_diff(diff, cx);
1585 tx.send(this.finalize_last_transaction().cloned()).ok();
1586 this.has_conflict = false;
1587 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1588 } else {
1589 if !diff.edits.is_empty()
1590 || this
1591 .edits_since::<usize>(&diff.base_version)
1592 .next()
1593 .is_some()
1594 {
1595 this.has_conflict = true;
1596 }
1597
1598 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1599 }
1600
1601 this.reload_task.take();
1602 })
1603 }));
1604 rx
1605 }
1606
1607 /// This method is called to signal that the buffer has been reloaded.
1608 pub fn did_reload(
1609 &mut self,
1610 version: clock::Global,
1611 line_ending: LineEnding,
1612 mtime: Option<MTime>,
1613 cx: &mut Context<Self>,
1614 ) {
1615 self.saved_version = version;
1616 self.has_unsaved_edits
1617 .set((self.saved_version.clone(), false));
1618 self.text.set_line_ending(line_ending);
1619 self.saved_mtime = mtime;
1620 cx.emit(BufferEvent::Reloaded);
1621 cx.notify();
1622 }
1623
1624 /// Updates the [`File`] backing this buffer. This should be called when
1625 /// the file has changed or has been deleted.
1626 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1627 let was_dirty = self.is_dirty();
1628 let mut file_changed = false;
1629
1630 if let Some(old_file) = self.file.as_ref() {
1631 if new_file.path() != old_file.path() {
1632 file_changed = true;
1633 }
1634
1635 let old_state = old_file.disk_state();
1636 let new_state = new_file.disk_state();
1637 if old_state != new_state {
1638 file_changed = true;
1639 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1640 cx.emit(BufferEvent::ReloadNeeded)
1641 }
1642 }
1643 } else {
1644 file_changed = true;
1645 };
1646
1647 self.file = Some(new_file);
1648 if file_changed {
1649 self.was_changed();
1650 self.non_text_state_update_count += 1;
1651 if was_dirty != self.is_dirty() {
1652 cx.emit(BufferEvent::DirtyChanged);
1653 }
1654 cx.emit(BufferEvent::FileHandleChanged);
1655 cx.notify();
1656 }
1657 }
1658
1659 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1660 Some(self.branch_state.as_ref()?.base_buffer.clone())
1661 }
1662
1663 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1664 pub fn language(&self) -> Option<&Arc<Language>> {
1665 self.language.as_ref()
1666 }
1667
1668 /// Returns the [`Language`] at the given location.
1669 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1670 let offset = position.to_offset(self);
1671 let mut is_first = true;
1672 let start_anchor = self.anchor_before(offset);
1673 let end_anchor = self.anchor_after(offset);
1674 self.syntax_map
1675 .lock()
1676 .layers_for_range(offset..offset, &self.text, false)
1677 .filter(|layer| {
1678 if is_first {
1679 is_first = false;
1680 return true;
1681 }
1682
1683 layer
1684 .included_sub_ranges
1685 .map(|sub_ranges| {
1686 sub_ranges.iter().any(|sub_range| {
1687 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1688 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1689 !is_before_start && !is_after_end
1690 })
1691 })
1692 .unwrap_or(true)
1693 })
1694 .last()
1695 .map(|info| info.language.clone())
1696 .or_else(|| self.language.clone())
1697 }
1698
1699 /// Returns each [`Language`] for the active syntax layers at the given location.
1700 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1701 let offset = position.to_offset(self);
1702 let mut languages: Vec<Arc<Language>> = self
1703 .syntax_map
1704 .lock()
1705 .layers_for_range(offset..offset, &self.text, false)
1706 .map(|info| info.language.clone())
1707 .collect();
1708
1709 if languages.is_empty()
1710 && let Some(buffer_language) = self.language()
1711 {
1712 languages.push(buffer_language.clone());
1713 }
1714
1715 languages
1716 }
1717
1718 /// An integer version number that accounts for all updates besides
1719 /// the buffer's text itself (which is versioned via a version vector).
1720 pub fn non_text_state_update_count(&self) -> usize {
1721 self.non_text_state_update_count
1722 }
1723
1724 /// Whether the buffer is being parsed in the background.
1725 #[cfg(any(test, feature = "test-support"))]
1726 pub fn is_parsing(&self) -> bool {
1727 self.reparse.is_some()
1728 }
1729
1730 /// Indicates whether the buffer contains any regions that may be
1731 /// written in a language that hasn't been loaded yet.
1732 pub fn contains_unknown_injections(&self) -> bool {
1733 self.syntax_map.lock().contains_unknown_injections()
1734 }
1735
1736 #[cfg(any(test, feature = "test-support"))]
1737 pub fn set_sync_parse_timeout(&mut self, timeout: Option<Duration>) {
1738 self.sync_parse_timeout = timeout;
1739 }
1740
1741 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1742 match Arc::get_mut(&mut self.tree_sitter_data) {
1743 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1744 None => {
1745 let tree_sitter_data = TreeSitterData::new(snapshot);
1746 self.tree_sitter_data = Arc::new(tree_sitter_data)
1747 }
1748 }
1749 }
1750
1751 /// Called after an edit to synchronize the buffer's main parse tree with
1752 /// the buffer's new underlying state.
1753 ///
1754 /// Locks the syntax map and interpolates the edits since the last reparse
1755 /// into the foreground syntax tree.
1756 ///
1757 /// Then takes a stable snapshot of the syntax map before unlocking it.
1758 /// The snapshot with the interpolated edits is sent to a background thread,
1759 /// where we ask Tree-sitter to perform an incremental parse.
1760 ///
1761     /// Meanwhile, if `may_block` is true, we block the main thread for up to
1762     /// the sync parse timeout (about 1ms) waiting for the parse to complete.
1763     /// If it finishes within that window, we proceed synchronously.
1764     ///
1765     /// If the timeout elapses, we return immediately with the interpolated tree
1766     /// still in the foreground, and spawn a second task that waits for the
1767     /// background parse to complete. When it does, it calls back into the main
1768     /// thread and assigns the newly parsed state.
1769     ///
1770     /// If the buffer or grammar changed since the start of the background parse,
1771     /// we initiate an additional reparse. To avoid concurrent parses for the same
1772     /// buffer, a new parse is only started if we are not already parsing in the
1773     /// background.
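    ///
    /// A hedged, illustrative sketch (not part of the original source): `reparse`
    /// is normally driven by `did_edit` after a transaction ends, but it can also
    /// be invoked directly; passing `may_block = false` always defers the parse to
    /// the background.
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| buffer.reparse(cx, false));
    /// ```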
1774 #[ztracing::instrument(skip_all)]
1775 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1776 if self.text.version() != *self.tree_sitter_data.version() {
1777 self.invalidate_tree_sitter_data(self.text.snapshot());
1778 }
1779 if self.reparse.is_some() {
1780 return;
1781 }
1782 let language = if let Some(language) = self.language.clone() {
1783 language
1784 } else {
1785 return;
1786 };
1787
1788 let text = self.text_snapshot();
1789 let parsed_version = self.version();
1790
1791 let mut syntax_map = self.syntax_map.lock();
1792 syntax_map.interpolate(&text);
1793 let language_registry = syntax_map.language_registry();
1794 let mut syntax_snapshot = syntax_map.snapshot();
1795 drop(syntax_map);
1796
1797 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1798 if may_block && let Some(sync_parse_timeout) = self.sync_parse_timeout {
1799 if let Ok(()) = syntax_snapshot.reparse_with_timeout(
1800 &text,
1801 language_registry.clone(),
1802 language.clone(),
1803 sync_parse_timeout,
1804 ) {
1805 self.did_finish_parsing(syntax_snapshot, Duration::from_millis(300), cx);
1806 self.reparse = None;
1807 return;
1808 }
1809 }
1810
1811 let parse_task = cx.background_spawn({
1812 let language = language.clone();
1813 let language_registry = language_registry.clone();
1814 async move {
1815 syntax_snapshot.reparse(&text, language_registry, language);
1816 syntax_snapshot
1817 }
1818 });
1819
1820 self.reparse = Some(cx.spawn(async move |this, cx| {
1821 let new_syntax_map = parse_task.await;
1822 this.update(cx, move |this, cx| {
1823 let grammar_changed = || {
1824 this.language
1825 .as_ref()
1826 .is_none_or(|current_language| !Arc::ptr_eq(&language, current_language))
1827 };
1828 let language_registry_changed = || {
1829 new_syntax_map.contains_unknown_injections()
1830 && language_registry.is_some_and(|registry| {
1831 registry.version() != new_syntax_map.language_registry_version()
1832 })
1833 };
1834 let parse_again = this.version.changed_since(&parsed_version)
1835 || language_registry_changed()
1836 || grammar_changed();
1837 this.did_finish_parsing(new_syntax_map, Duration::ZERO, cx);
1838 this.reparse = None;
1839 if parse_again {
1840 this.reparse(cx, false);
1841 }
1842 })
1843 .ok();
1844 }));
1845 }
1846
1847 fn did_finish_parsing(
1848 &mut self,
1849 syntax_snapshot: SyntaxSnapshot,
1850 block_budget: Duration,
1851 cx: &mut Context<Self>,
1852 ) {
1853 self.non_text_state_update_count += 1;
1854 self.syntax_map.lock().did_parse(syntax_snapshot);
1855 self.was_changed();
1856 self.request_autoindent(cx, block_budget);
1857 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1858 self.invalidate_tree_sitter_data(self.text.snapshot());
1859 cx.emit(BufferEvent::Reparsed);
1860 cx.notify();
1861 }
1862
1863 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1864 self.parse_status.1.clone()
1865 }
1866
1867     /// Waits until the buffer is no longer parsing.
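    ///
    /// A hedged, illustrative sketch (not part of the original source), useful in
    /// tests that must wait for background parsing to settle. Assumes a
    /// `buffer: Entity<Buffer>`, an `&App` when creating the future, and an async
    /// context in which to await it.
    ///
    /// ```ignore
    /// let idle = buffer.read(cx).parsing_idle();
    /// idle.await;
    /// // The syntax tree now reflects the most recent reparse.
    /// ```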
1868 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1869 let mut parse_status = self.parse_status();
1870 async move {
1871 while *parse_status.borrow() != ParseStatus::Idle {
1872 if parse_status.changed().await.is_err() {
1873 break;
1874 }
1875 }
1876 }
1877 }
1878
1879 /// Assign to the buffer a set of diagnostics created by a given language server.
1880 pub fn update_diagnostics(
1881 &mut self,
1882 server_id: LanguageServerId,
1883 diagnostics: DiagnosticSet,
1884 cx: &mut Context<Self>,
1885 ) {
1886 let lamport_timestamp = self.text.lamport_clock.tick();
1887 let op = Operation::UpdateDiagnostics {
1888 server_id,
1889 diagnostics: diagnostics.iter().cloned().collect(),
1890 lamport_timestamp,
1891 };
1892
1893 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1894 self.send_operation(op, true, cx);
1895 }
1896
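    /// Returns the diagnostic entries stored for the given language server, or for
    /// all language servers when `for_server` is `None`.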
1897 pub fn buffer_diagnostics(
1898 &self,
1899 for_server: Option<LanguageServerId>,
1900 ) -> Vec<&DiagnosticEntry<Anchor>> {
1901 match for_server {
1902 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1903 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1904 Err(_) => Vec::new(),
1905 },
1906 None => self
1907 .diagnostics
1908 .iter()
1909 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1910 .collect(),
1911 }
1912 }
1913
1914 fn request_autoindent(&mut self, cx: &mut Context<Self>, block_budget: Duration) {
1915 if let Some(indent_sizes) = self.compute_autoindents() {
1916 let indent_sizes = cx.background_spawn(indent_sizes);
1917 match cx
1918 .foreground_executor()
1919 .block_with_timeout(block_budget, indent_sizes)
1920 {
1921 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1922 Err(indent_sizes) => {
1923 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1924 let indent_sizes = indent_sizes.await;
1925 this.update(cx, |this, cx| {
1926 this.apply_autoindents(indent_sizes, cx);
1927 })
1928 .ok();
1929 }));
1930 }
1931 }
1932 } else {
1933 self.autoindent_requests.clear();
1934 for tx in self.wait_for_autoindent_txs.drain(..) {
1935 tx.send(()).ok();
1936 }
1937 }
1938 }
1939
1940 fn compute_autoindents(
1941 &self,
1942 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1943 let max_rows_between_yields = 100;
1944 let snapshot = self.snapshot();
1945 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1946 return None;
1947 }
1948
1949 let autoindent_requests = self.autoindent_requests.clone();
1950 Some(async move {
1951 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1952 for request in autoindent_requests {
1953 // Resolve each edited range to its row in the current buffer and in the
1954 // buffer before this batch of edits.
1955 let mut row_ranges = Vec::new();
1956 let mut old_to_new_rows = BTreeMap::new();
1957 let mut language_indent_sizes_by_new_row = Vec::new();
1958 for entry in &request.entries {
1959 let position = entry.range.start;
1960 let new_row = position.to_point(&snapshot).row;
1961 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1962 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1963
1964 if let Some(old_row) = entry.old_row {
1965 old_to_new_rows.insert(old_row, new_row);
1966 }
1967 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1968 }
1969
1970 // Build a map containing the suggested indentation for each of the edited lines
1971 // with respect to the state of the buffer before these edits. This map is keyed
1972 // by the rows for these lines in the current state of the buffer.
1973 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1974 let old_edited_ranges =
1975 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1976 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1977 let mut language_indent_size = IndentSize::default();
1978 for old_edited_range in old_edited_ranges {
1979 let suggestions = request
1980 .before_edit
1981 .suggest_autoindents(old_edited_range.clone())
1982 .into_iter()
1983 .flatten();
1984 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1985 if let Some(suggestion) = suggestion {
1986 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1987
1988 // Find the indent size based on the language for this row.
1989 while let Some((row, size)) = language_indent_sizes.peek() {
1990 if *row > new_row {
1991 break;
1992 }
1993 language_indent_size = *size;
1994 language_indent_sizes.next();
1995 }
1996
1997 let suggested_indent = old_to_new_rows
1998 .get(&suggestion.basis_row)
1999 .and_then(|from_row| {
2000 Some(old_suggestions.get(from_row).copied()?.0)
2001 })
2002 .unwrap_or_else(|| {
2003 request
2004 .before_edit
2005 .indent_size_for_line(suggestion.basis_row)
2006 })
2007 .with_delta(suggestion.delta, language_indent_size);
2008 old_suggestions
2009 .insert(new_row, (suggested_indent, suggestion.within_error));
2010 }
2011 }
2012 yield_now().await;
2013 }
2014
2015 // Compute new suggestions for each line, but only include them in the result
2016 // if they differ from the old suggestion for that line.
2017 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
2018 let mut language_indent_size = IndentSize::default();
2019 for (row_range, original_indent_column) in row_ranges {
2020 let new_edited_row_range = if request.is_block_mode {
2021 row_range.start..row_range.start + 1
2022 } else {
2023 row_range.clone()
2024 };
2025
2026 let suggestions = snapshot
2027 .suggest_autoindents(new_edited_row_range.clone())
2028 .into_iter()
2029 .flatten();
2030 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
2031 if let Some(suggestion) = suggestion {
2032 // Find the indent size based on the language for this row.
2033 while let Some((row, size)) = language_indent_sizes.peek() {
2034 if *row > new_row {
2035 break;
2036 }
2037 language_indent_size = *size;
2038 language_indent_sizes.next();
2039 }
2040
2041 let suggested_indent = indent_sizes
2042 .get(&suggestion.basis_row)
2043 .copied()
2044 .map(|e| e.0)
2045 .unwrap_or_else(|| {
2046 snapshot.indent_size_for_line(suggestion.basis_row)
2047 })
2048 .with_delta(suggestion.delta, language_indent_size);
2049
2050 if old_suggestions.get(&new_row).is_none_or(
2051 |(old_indentation, was_within_error)| {
2052 suggested_indent != *old_indentation
2053 && (!suggestion.within_error || *was_within_error)
2054 },
2055 ) {
2056 indent_sizes.insert(
2057 new_row,
2058 (suggested_indent, request.ignore_empty_lines),
2059 );
2060 }
2061 }
2062 }
2063
2064 if let (true, Some(original_indent_column)) =
2065 (request.is_block_mode, original_indent_column)
2066 {
2067 let new_indent =
2068 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2069 *indent
2070 } else {
2071 snapshot.indent_size_for_line(row_range.start)
2072 };
2073 let delta = new_indent.len as i64 - original_indent_column as i64;
2074 if delta != 0 {
2075 for row in row_range.skip(1) {
2076 indent_sizes.entry(row).or_insert_with(|| {
2077 let mut size = snapshot.indent_size_for_line(row);
2078 if size.kind == new_indent.kind {
2079 match delta.cmp(&0) {
2080 Ordering::Greater => size.len += delta as u32,
2081 Ordering::Less => {
2082 size.len = size.len.saturating_sub(-delta as u32)
2083 }
2084 Ordering::Equal => {}
2085 }
2086 }
2087 (size, request.ignore_empty_lines)
2088 });
2089 }
2090 }
2091 }
2092
2093 yield_now().await;
2094 }
2095 }
2096
2097 indent_sizes
2098 .into_iter()
2099 .filter_map(|(row, (indent, ignore_empty_lines))| {
2100 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2101 None
2102 } else {
2103 Some((row, indent))
2104 }
2105 })
2106 .collect()
2107 })
2108 }
2109
2110 fn apply_autoindents(
2111 &mut self,
2112 indent_sizes: BTreeMap<u32, IndentSize>,
2113 cx: &mut Context<Self>,
2114 ) {
2115 self.autoindent_requests.clear();
2116 for tx in self.wait_for_autoindent_txs.drain(..) {
2117 tx.send(()).ok();
2118 }
2119
2120 let edits: Vec<_> = indent_sizes
2121 .into_iter()
2122 .filter_map(|(row, indent_size)| {
2123 let current_size = indent_size_for_line(self, row);
2124 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2125 })
2126 .collect();
2127
2128 let preserve_preview = self.preserve_preview();
2129 self.edit(edits, None, cx);
2130 if preserve_preview {
2131 self.refresh_preview();
2132 }
2133 }
2134
2135 /// Create a minimal edit that will cause the given row to be indented
2136 /// with the given size. After applying this edit, the length of the line
2137 /// will always be at least `new_size.len`.
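    ///
    /// A hedged, illustrative sketch (not part of the original source), using
    /// hypothetical row and indent values: growing a two-space indent to four
    /// spaces on row 3 yields an insertion at the start of that row.
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```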
2138 pub fn edit_for_indent_size_adjustment(
2139 row: u32,
2140 current_size: IndentSize,
2141 new_size: IndentSize,
2142 ) -> Option<(Range<Point>, String)> {
2143 if new_size.kind == current_size.kind {
2144             match new_size.len.cmp(&current_size.len) {
2145 Ordering::Greater => {
2146 let point = Point::new(row, 0);
2147 Some((
2148 point..point,
2149 iter::repeat(new_size.char())
2150 .take((new_size.len - current_size.len) as usize)
2151 .collect::<String>(),
2152 ))
2153 }
2154
2155 Ordering::Less => Some((
2156 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2157 String::new(),
2158 )),
2159
2160 Ordering::Equal => None,
2161 }
2162 } else {
2163 Some((
2164 Point::new(row, 0)..Point::new(row, current_size.len),
2165 iter::repeat(new_size.char())
2166 .take(new_size.len as usize)
2167 .collect::<String>(),
2168 ))
2169 }
2170 }
2171
2172 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2173 /// and the given new text.
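    ///
    /// A hedged, illustrative sketch (not part of the original source) of the usual
    /// round trip with [`Buffer::apply_diff`]; assumes a `buffer: Entity<Buffer>`,
    /// a `new_text` string, and an async context in which the task can be awaited.
    ///
    /// ```ignore
    /// let task = buffer.read(cx).diff(new_text, cx);
    /// let diff = task.await;
    /// buffer.update(cx, |buffer, cx| {
    ///     // Adjusts the hunks around any edits made since the diff was computed.
    ///     buffer.apply_diff(diff, cx);
    /// });
    /// ```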
2174 pub fn diff<T>(&self, new_text: T, cx: &App) -> Task<Diff>
2175 where
2176 T: AsRef<str> + Send + 'static,
2177 {
2178 let old_text = self.as_rope().clone();
2179 let base_version = self.version();
2180 cx.background_spawn(async move {
2181 let old_text = old_text.to_string();
2182 let mut new_text = new_text.as_ref().to_owned();
2183 let line_ending = LineEnding::detect(&new_text);
2184 LineEnding::normalize(&mut new_text);
2185 let edits = text_diff(&old_text, &new_text);
2186 Diff {
2187 base_version,
2188 line_ending,
2189 edits,
2190 }
2191 })
2192 }
2193
2194 /// Spawns a background task that searches the buffer for any whitespace
2195     /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2196 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2197 let old_text = self.as_rope().clone();
2198 let line_ending = self.line_ending();
2199 let base_version = self.version();
2200 cx.background_spawn(async move {
2201 let ranges = trailing_whitespace_ranges(&old_text);
2202 let empty = Arc::<str>::from("");
2203 Diff {
2204 base_version,
2205 line_ending,
2206 edits: ranges
2207 .into_iter()
2208 .map(|range| (range, empty.clone()))
2209 .collect(),
2210 }
2211 })
2212 }
2213
2214 /// Ensures that the buffer ends with a single newline character, and
2215 /// no other whitespace. Skips if the buffer is empty.
2216 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2217 let len = self.len();
2218 if len == 0 {
2219 return;
2220 }
2221 let mut offset = len;
2222 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2223 let non_whitespace_len = chunk
2224 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2225 .len();
2226 offset -= chunk.len();
2227 offset += non_whitespace_len;
2228 if non_whitespace_len != 0 {
2229 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2230 return;
2231 }
2232 break;
2233 }
2234 }
2235 self.edit([(offset..len, "\n")], None, cx);
2236 }
2237
2238 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2239 /// calculated, then adjust the diff to account for those changes, and discard any
2240 /// parts of the diff that conflict with those changes.
2241 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2242 let snapshot = self.snapshot();
2243 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2244 let mut delta = 0;
2245 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2246 while let Some(edit_since) = edits_since.peek() {
2247 // If the edit occurs after a diff hunk, then it does not
2248 // affect that hunk.
2249 if edit_since.old.start > range.end {
2250 break;
2251 }
2252 // If the edit precedes the diff hunk, then adjust the hunk
2253 // to reflect the edit.
2254 else if edit_since.old.end < range.start {
2255 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2256 edits_since.next();
2257 }
2258 // If the edit intersects a diff hunk, then discard that hunk.
2259 else {
2260 return None;
2261 }
2262 }
2263
2264 let start = (range.start as i64 + delta) as usize;
2265 let end = (range.end as i64 + delta) as usize;
2266 Some((start..end, new_text))
2267 });
2268
2269 self.start_transaction();
2270 self.text.set_line_ending(diff.line_ending);
2271 self.edit(adjusted_edits, None, cx);
2272 self.end_transaction(cx)
2273 }
2274
2275 pub fn has_unsaved_edits(&self) -> bool {
2276 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2277
2278 if last_version == self.version {
2279 self.has_unsaved_edits
2280 .set((last_version, has_unsaved_edits));
2281 return has_unsaved_edits;
2282 }
2283
2284 let has_edits = self.has_edits_since(&self.saved_version);
2285 self.has_unsaved_edits
2286 .set((self.version.clone(), has_edits));
2287 has_edits
2288 }
2289
2290 /// Checks if the buffer has unsaved changes.
2291 pub fn is_dirty(&self) -> bool {
2292 if self.capability == Capability::ReadOnly {
2293 return false;
2294 }
2295 if self.has_conflict {
2296 return true;
2297 }
2298 match self.file.as_ref().map(|f| f.disk_state()) {
2299 Some(DiskState::New) | Some(DiskState::Deleted) => {
2300 !self.is_empty() && self.has_unsaved_edits()
2301 }
2302 _ => self.has_unsaved_edits(),
2303 }
2304 }
2305
2306 /// Marks the buffer as having a conflict regardless of current buffer state.
2307 pub fn set_conflict(&mut self) {
2308 self.has_conflict = true;
2309 }
2310
2311 /// Checks if the buffer and its file have both changed since the buffer
2312 /// was last saved or reloaded.
2313 pub fn has_conflict(&self) -> bool {
2314 if self.has_conflict {
2315 return true;
2316 }
2317 let Some(file) = self.file.as_ref() else {
2318 return false;
2319 };
2320 match file.disk_state() {
2321 DiskState::New => false,
2322 DiskState::Present { mtime } => match self.saved_mtime {
2323 Some(saved_mtime) => {
2324 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2325 }
2326 None => true,
2327 },
2328 DiskState::Deleted => false,
2329 DiskState::Historic { .. } => false,
2330 }
2331 }
2332
2333 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2334 pub fn subscribe(&mut self) -> Subscription<usize> {
2335 self.text.subscribe()
2336 }
2337
2338 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2339 ///
2340 /// This allows downstream code to check if the buffer's text has changed without
2341     /// waiting for an effect cycle, which would be required if using events.
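    ///
    /// A hedged, illustrative sketch (not part of the original source): register a
    /// weak change bit and poll it later instead of listening for buffer events.
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let bit = Rc::new(Cell::new(false));
    /// buffer.update(cx, |buffer, _| buffer.record_changes(Rc::downgrade(&bit)));
    /// // ...later, after edits may have occurred...
    /// if bit.take() {
    ///     // The buffer's text changed since the last check.
    /// }
    /// ```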
2342 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2343 if let Err(ix) = self
2344 .change_bits
2345 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2346 {
2347 self.change_bits.insert(ix, bit);
2348 }
2349 }
2350
2351 /// Set the change bit for all "listeners".
2352 fn was_changed(&mut self) {
2353 self.change_bits.retain(|change_bit| {
2354 change_bit
2355 .upgrade()
2356 .inspect(|bit| {
2357 _ = bit.replace(true);
2358 })
2359 .is_some()
2360 });
2361 }
2362
2363 /// Starts a transaction, if one is not already in-progress. When undoing or
2364 /// redoing edits, all of the edits performed within a transaction are undone
2365 /// or redone together.
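    ///
    /// A hedged, illustrative sketch (not part of the original source): group two
    /// edits so that a single undo reverts both. Positions are hypothetical.
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "one ")], None, cx);
    /// buffer.edit([(Point::new(0, 4)..Point::new(0, 4), "two ")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both insertions.
    /// ```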
2366 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2367 self.start_transaction_at(Instant::now())
2368 }
2369
2370 /// Starts a transaction, providing the current time. Subsequent transactions
2371 /// that occur within a short period of time will be grouped together. This
2372 /// is controlled by the buffer's undo grouping duration.
2373 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2374 self.transaction_depth += 1;
2375 if self.was_dirty_before_starting_transaction.is_none() {
2376 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2377 }
2378 self.text.start_transaction_at(now)
2379 }
2380
2381 /// Terminates the current transaction, if this is the outermost transaction.
2382 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2383 self.end_transaction_at(Instant::now(), cx)
2384 }
2385
2386 /// Terminates the current transaction, providing the current time. Subsequent transactions
2387 /// that occur within a short period of time will be grouped together. This
2388 /// is controlled by the buffer's undo grouping duration.
2389 pub fn end_transaction_at(
2390 &mut self,
2391 now: Instant,
2392 cx: &mut Context<Self>,
2393 ) -> Option<TransactionId> {
2394 assert!(self.transaction_depth > 0);
2395 self.transaction_depth -= 1;
2396 let was_dirty = if self.transaction_depth == 0 {
2397 self.was_dirty_before_starting_transaction.take().unwrap()
2398 } else {
2399 false
2400 };
2401 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2402 self.did_edit(&start_version, was_dirty, cx);
2403 Some(transaction_id)
2404 } else {
2405 None
2406 }
2407 }
2408
2409 /// Manually add a transaction to the buffer's undo history.
2410 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2411 self.text.push_transaction(transaction, now);
2412 }
2413
2414 /// Differs from `push_transaction` in that it does not clear the redo
2415 /// stack. Intended to be used to create a parent transaction to merge
2416 /// potential child transactions into.
2417 ///
2418 /// The caller is responsible for removing it from the undo history using
2419 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2420 /// are merged into this transaction, the caller is responsible for ensuring
2421 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2422 /// cleared is to create transactions with the usual `start_transaction` and
2423 /// `end_transaction` methods and merging the resulting transactions into
2424     /// the transaction created by this method.
2425 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2426 self.text.push_empty_transaction(now)
2427 }
2428
2429 /// Prevent the last transaction from being grouped with any subsequent transactions,
2430     /// even if they occur within the buffer's undo grouping duration.
2431 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2432 self.text.finalize_last_transaction()
2433 }
2434
2435 /// Manually group all changes since a given transaction.
2436 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2437 self.text.group_until_transaction(transaction_id);
2438 }
2439
2440 /// Manually remove a transaction from the buffer's undo history
2441 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2442 self.text.forget_transaction(transaction_id)
2443 }
2444
2445 /// Retrieve a transaction from the buffer's undo history
2446 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2447 self.text.get_transaction(transaction_id)
2448 }
2449
2450 /// Manually merge two transactions in the buffer's undo history.
2451 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2452 self.text.merge_transactions(transaction, destination);
2453 }
2454
2455 /// Waits for the buffer to receive operations with the given timestamps.
2456 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2457 &mut self,
2458 edit_ids: It,
2459 ) -> impl Future<Output = Result<()>> + use<It> {
2460 self.text.wait_for_edits(edit_ids)
2461 }
2462
2463 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2464 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2465 &mut self,
2466 anchors: It,
2467 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2468 self.text.wait_for_anchors(anchors)
2469 }
2470
2471 /// Waits for the buffer to receive operations up to the given version.
2472 pub fn wait_for_version(
2473 &mut self,
2474 version: clock::Global,
2475 ) -> impl Future<Output = Result<()>> + use<> {
2476 self.text.wait_for_version(version)
2477 }
2478
2479 /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
2480     /// [`Buffer::wait_for_anchors`] to resolve with an error.
2481 pub fn give_up_waiting(&mut self) {
2482 self.text.give_up_waiting();
2483 }
2484
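    /// Returns a receiver that resolves once the currently pending auto-indent
    /// requests have been applied, or `None` if no auto-indent is pending.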
2485 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2486 let mut rx = None;
2487 if !self.autoindent_requests.is_empty() {
2488 let channel = oneshot::channel();
2489 self.wait_for_autoindent_txs.push(channel.0);
2490 rx = Some(channel.1);
2491 }
2492 rx
2493 }
2494
2495 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2496 pub fn set_active_selections(
2497 &mut self,
2498 selections: Arc<[Selection<Anchor>]>,
2499 line_mode: bool,
2500 cursor_shape: CursorShape,
2501 cx: &mut Context<Self>,
2502 ) {
2503 let lamport_timestamp = self.text.lamport_clock.tick();
2504 self.remote_selections.insert(
2505 self.text.replica_id(),
2506 SelectionSet {
2507 selections: selections.clone(),
2508 lamport_timestamp,
2509 line_mode,
2510 cursor_shape,
2511 },
2512 );
2513 self.send_operation(
2514 Operation::UpdateSelections {
2515 selections,
2516 line_mode,
2517 lamport_timestamp,
2518 cursor_shape,
2519 },
2520 true,
2521 cx,
2522 );
2523 self.non_text_state_update_count += 1;
2524 cx.notify();
2525 }
2526
2527 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2528 /// this replica.
2529 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2530 if self
2531 .remote_selections
2532 .get(&self.text.replica_id())
2533 .is_none_or(|set| !set.selections.is_empty())
2534 {
2535 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2536 }
2537 }
2538
2539 pub fn set_agent_selections(
2540 &mut self,
2541 selections: Arc<[Selection<Anchor>]>,
2542 line_mode: bool,
2543 cursor_shape: CursorShape,
2544 cx: &mut Context<Self>,
2545 ) {
2546 let lamport_timestamp = self.text.lamport_clock.tick();
2547 self.remote_selections.insert(
2548 ReplicaId::AGENT,
2549 SelectionSet {
2550 selections,
2551 lamport_timestamp,
2552 line_mode,
2553 cursor_shape,
2554 },
2555 );
2556 self.non_text_state_update_count += 1;
2557 cx.notify();
2558 }
2559
2560 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2561 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2562 }
2563
2564 /// Replaces the buffer's entire text.
2565 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2566 where
2567 T: Into<Arc<str>>,
2568 {
2569 self.autoindent_requests.clear();
2570 self.edit([(0..self.len(), text)], None, cx)
2571 }
2572
2573 /// Appends the given text to the end of the buffer.
2574 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2575 where
2576 T: Into<Arc<str>>,
2577 {
2578 self.edit([(self.len()..self.len(), text)], None, cx)
2579 }
2580
2581 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2582 /// delete, and a string of text to insert at that location. Adjacent edits are coalesced.
2583 ///
2584 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2585 /// request for the edited ranges, which will be processed when the buffer finishes
2586 /// parsing.
2587 ///
2588     /// Parsing takes place at the end of a transaction, and may run synchronously
2589 /// or asynchronously, depending on the changes.
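    ///
    /// A hedged, illustrative sketch (not part of the original source): replace a
    /// hypothetical range and request per-line auto-indentation for the inserted text.
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(Point::new(1, 0)..Point::new(1, 4), "if x {\n    y\n}")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```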
2590 pub fn edit<I, S, T>(
2591 &mut self,
2592 edits_iter: I,
2593 autoindent_mode: Option<AutoindentMode>,
2594 cx: &mut Context<Self>,
2595 ) -> Option<clock::Lamport>
2596 where
2597 I: IntoIterator<Item = (Range<S>, T)>,
2598 S: ToOffset,
2599 T: Into<Arc<str>>,
2600 {
2601 self.edit_internal(edits_iter, autoindent_mode, true, cx)
2602 }
2603
2604 /// Like [`edit`](Self::edit), but does not coalesce adjacent edits.
2605 pub fn edit_non_coalesce<I, S, T>(
2606 &mut self,
2607 edits_iter: I,
2608 autoindent_mode: Option<AutoindentMode>,
2609 cx: &mut Context<Self>,
2610 ) -> Option<clock::Lamport>
2611 where
2612 I: IntoIterator<Item = (Range<S>, T)>,
2613 S: ToOffset,
2614 T: Into<Arc<str>>,
2615 {
2616 self.edit_internal(edits_iter, autoindent_mode, false, cx)
2617 }
2618
2619 fn edit_internal<I, S, T>(
2620 &mut self,
2621 edits_iter: I,
2622 autoindent_mode: Option<AutoindentMode>,
2623 coalesce_adjacent: bool,
2624 cx: &mut Context<Self>,
2625 ) -> Option<clock::Lamport>
2626 where
2627 I: IntoIterator<Item = (Range<S>, T)>,
2628 S: ToOffset,
2629 T: Into<Arc<str>>,
2630 {
2631 // Skip invalid edits and coalesce contiguous ones.
2632 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2633
2634 for (range, new_text) in edits_iter {
2635 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2636
2637 if range.start > range.end {
2638 mem::swap(&mut range.start, &mut range.end);
2639 }
2640 let new_text = new_text.into();
2641 if !new_text.is_empty() || !range.is_empty() {
2642 let prev_edit = edits.last_mut();
2643 let should_coalesce = prev_edit.as_ref().is_some_and(|(prev_range, _)| {
2644 if coalesce_adjacent {
2645 prev_range.end >= range.start
2646 } else {
2647 prev_range.end > range.start
2648 }
2649 });
2650
2651 if let Some((prev_range, prev_text)) = prev_edit
2652 && should_coalesce
2653 {
2654 prev_range.end = cmp::max(prev_range.end, range.end);
2655 *prev_text = format!("{prev_text}{new_text}").into();
2656 } else {
2657 edits.push((range, new_text));
2658 }
2659 }
2660 }
2661 if edits.is_empty() {
2662 return None;
2663 }
2664
2665 self.start_transaction();
2666 self.pending_autoindent.take();
2667 let autoindent_request = autoindent_mode
2668 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2669
2670 let edit_operation = self.text.edit(edits.iter().cloned());
2671 let edit_id = edit_operation.timestamp();
2672
2673 if let Some((before_edit, mode)) = autoindent_request {
2674 let mut delta = 0isize;
2675 let mut previous_setting = None;
2676 let entries: Vec<_> = edits
2677 .into_iter()
2678 .enumerate()
2679 .zip(&edit_operation.as_edit().unwrap().new_text)
2680 .filter(|((_, (range, _)), _)| {
2681 let language = before_edit.language_at(range.start);
2682 let language_id = language.map(|l| l.id());
2683 if let Some((cached_language_id, auto_indent)) = previous_setting
2684 && cached_language_id == language_id
2685 {
2686 auto_indent
2687 } else {
2688 // The auto-indent setting is not present in editorconfigs, hence
2689 // we can avoid passing the file here.
2690 let auto_indent = LanguageSettings::resolve(
2691 None,
2692 language.map(|l| l.name()).as_ref(),
2693 cx,
2694 )
2695 .auto_indent;
2696 previous_setting = Some((language_id, auto_indent));
2697 auto_indent
2698 }
2699 })
2700 .map(|((ix, (range, _)), new_text)| {
2701 let new_text_length = new_text.len();
2702 let old_start = range.start.to_point(&before_edit);
2703 let new_start = (delta + range.start as isize) as usize;
2704 let range_len = range.end - range.start;
2705 delta += new_text_length as isize - range_len as isize;
2706
2707 // Decide what range of the insertion to auto-indent, and whether
2708 // the first line of the insertion should be considered a newly-inserted line
2709 // or an edit to an existing line.
2710 let mut range_of_insertion_to_indent = 0..new_text_length;
2711 let mut first_line_is_new = true;
2712
2713 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2714 let old_line_end = before_edit.line_len(old_start.row);
2715
2716 if old_start.column > old_line_start {
2717 first_line_is_new = false;
2718 }
2719
2720 if !new_text.contains('\n')
2721 && (old_start.column + (range_len as u32) < old_line_end
2722 || old_line_end == old_line_start)
2723 {
2724 first_line_is_new = false;
2725 }
2726
2727 // When inserting text starting with a newline, avoid auto-indenting the
2728 // previous line.
2729 if new_text.starts_with('\n') {
2730 range_of_insertion_to_indent.start += 1;
2731 first_line_is_new = true;
2732 }
2733
2734 let mut original_indent_column = None;
2735 if let AutoindentMode::Block {
2736 original_indent_columns,
2737 } = &mode
2738 {
2739 original_indent_column = Some(if new_text.starts_with('\n') {
2740 indent_size_for_text(
2741 new_text[range_of_insertion_to_indent.clone()].chars(),
2742 )
2743 .len
2744 } else {
2745 original_indent_columns
2746 .get(ix)
2747 .copied()
2748 .flatten()
2749 .unwrap_or_else(|| {
2750 indent_size_for_text(
2751 new_text[range_of_insertion_to_indent.clone()].chars(),
2752 )
2753 .len
2754 })
2755 });
2756
2757 // Avoid auto-indenting the line after the edit.
2758 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2759 range_of_insertion_to_indent.end -= 1;
2760 }
2761 }
2762
2763 AutoindentRequestEntry {
2764 original_indent_column,
2765 old_row: if first_line_is_new {
2766 None
2767 } else {
2768 Some(old_start.row)
2769 },
2770 indent_size: before_edit.language_indent_size_at(range.start, cx),
2771 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2772 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2773 }
2774 })
2775 .collect();
2776
2777 if !entries.is_empty() {
2778 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2779 before_edit,
2780 entries,
2781 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2782 ignore_empty_lines: false,
2783 }));
2784 }
2785 }
2786
2787 self.end_transaction(cx);
2788 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2789 Some(edit_id)
2790 }
2791
2792 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2793 self.was_changed();
2794
2795 if self.edits_since::<usize>(old_version).next().is_none() {
2796 return;
2797 }
2798
2799 self.reparse(cx, true);
2800 cx.emit(BufferEvent::Edited);
2801 if was_dirty != self.is_dirty() {
2802 cx.emit(BufferEvent::DirtyChanged);
2803 }
2804 cx.notify();
2805 }
2806
2807 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2808 where
2809 I: IntoIterator<Item = Range<T>>,
2810 T: ToOffset + Copy,
2811 {
2812 let before_edit = self.snapshot();
2813 let entries = ranges
2814 .into_iter()
2815 .map(|range| AutoindentRequestEntry {
2816 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2817 old_row: None,
2818 indent_size: before_edit.language_indent_size_at(range.start, cx),
2819 original_indent_column: None,
2820 })
2821 .collect();
2822 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2823 before_edit,
2824 entries,
2825 is_block_mode: false,
2826 ignore_empty_lines: true,
2827 }));
2828 self.request_autoindent(cx, Duration::from_micros(300));
2829 }
2830
2831     /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
2832     /// You can also request the insertion of empty lines above and below the line starting at the returned point.
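    ///
    /// A hedged, illustrative sketch (not part of the original source), using a
    /// hypothetical position: open an empty, auto-indented line near row 5, padded
    /// with blank lines above and below.
    ///
    /// ```ignore
    /// let new_line_start = buffer.insert_empty_line(Point::new(5, 3), true, true, cx);
    /// ```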
2833 pub fn insert_empty_line(
2834 &mut self,
2835 position: impl ToPoint,
2836 space_above: bool,
2837 space_below: bool,
2838 cx: &mut Context<Self>,
2839 ) -> Point {
2840 let mut position = position.to_point(self);
2841
2842 self.start_transaction();
2843
2844 self.edit(
2845 [(position..position, "\n")],
2846 Some(AutoindentMode::EachLine),
2847 cx,
2848 );
2849
2850 if position.column > 0 {
2851 position += Point::new(1, 0);
2852 }
2853
2854 if !self.is_line_blank(position.row) {
2855 self.edit(
2856 [(position..position, "\n")],
2857 Some(AutoindentMode::EachLine),
2858 cx,
2859 );
2860 }
2861
2862 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2863 self.edit(
2864 [(position..position, "\n")],
2865 Some(AutoindentMode::EachLine),
2866 cx,
2867 );
2868 position.row += 1;
2869 }
2870
2871 if space_below
2872 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2873 {
2874 self.edit(
2875 [(position..position, "\n")],
2876 Some(AutoindentMode::EachLine),
2877 cx,
2878 );
2879 }
2880
2881 self.end_transaction(cx);
2882
2883 position
2884 }
2885
2886 /// Applies the given remote operations to the buffer.
2887 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2888 self.pending_autoindent.take();
2889 let was_dirty = self.is_dirty();
2890 let old_version = self.version.clone();
2891 let mut deferred_ops = Vec::new();
2892 let buffer_ops = ops
2893 .into_iter()
2894 .filter_map(|op| match op {
2895 Operation::Buffer(op) => Some(op),
2896 _ => {
2897 if self.can_apply_op(&op) {
2898 self.apply_op(op, cx);
2899 } else {
2900 deferred_ops.push(op);
2901 }
2902 None
2903 }
2904 })
2905 .collect::<Vec<_>>();
2906 for operation in buffer_ops.iter() {
2907 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2908 }
2909 self.text.apply_ops(buffer_ops);
2910 self.deferred_ops.insert(deferred_ops);
2911 self.flush_deferred_ops(cx);
2912 self.did_edit(&old_version, was_dirty, cx);
2913 // Notify independently of whether the buffer was edited as the operations could include a
2914 // selection update.
2915 cx.notify();
2916 }
2917
2918 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2919 let mut deferred_ops = Vec::new();
2920 for op in self.deferred_ops.drain().iter().cloned() {
2921 if self.can_apply_op(&op) {
2922 self.apply_op(op, cx);
2923 } else {
2924 deferred_ops.push(op);
2925 }
2926 }
2927 self.deferred_ops.insert(deferred_ops);
2928 }
2929
2930 pub fn has_deferred_ops(&self) -> bool {
2931 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2932 }
2933
2934 fn can_apply_op(&self, operation: &Operation) -> bool {
2935 match operation {
2936 Operation::Buffer(_) => {
2937 unreachable!("buffer operations should never be applied at this layer")
2938 }
2939 Operation::UpdateDiagnostics {
2940 diagnostics: diagnostic_set,
2941 ..
2942 } => diagnostic_set.iter().all(|diagnostic| {
2943 self.text.can_resolve(&diagnostic.range.start)
2944 && self.text.can_resolve(&diagnostic.range.end)
2945 }),
2946 Operation::UpdateSelections { selections, .. } => selections
2947 .iter()
2948 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2949 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2950 }
2951 }
2952
2953 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2954 match operation {
2955 Operation::Buffer(_) => {
2956 unreachable!("buffer operations should never be applied at this layer")
2957 }
2958 Operation::UpdateDiagnostics {
2959 server_id,
2960 diagnostics: diagnostic_set,
2961 lamport_timestamp,
2962 } => {
2963 let snapshot = self.snapshot();
2964 self.apply_diagnostic_update(
2965 server_id,
2966 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2967 lamport_timestamp,
2968 cx,
2969 );
2970 }
2971 Operation::UpdateSelections {
2972 selections,
2973 lamport_timestamp,
2974 line_mode,
2975 cursor_shape,
2976 } => {
2977 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2978 && set.lamport_timestamp > lamport_timestamp
2979 {
2980 return;
2981 }
2982
2983 self.remote_selections.insert(
2984 lamport_timestamp.replica_id,
2985 SelectionSet {
2986 selections,
2987 lamport_timestamp,
2988 line_mode,
2989 cursor_shape,
2990 },
2991 );
2992 self.text.lamport_clock.observe(lamport_timestamp);
2993 self.non_text_state_update_count += 1;
2994 }
2995 Operation::UpdateCompletionTriggers {
2996 triggers,
2997 lamport_timestamp,
2998 server_id,
2999 } => {
3000 if triggers.is_empty() {
3001 self.completion_triggers_per_language_server
3002 .remove(&server_id);
3003 self.completion_triggers = self
3004 .completion_triggers_per_language_server
3005 .values()
3006 .flat_map(|triggers| triggers.iter().cloned())
3007 .collect();
3008 } else {
3009 self.completion_triggers_per_language_server
3010 .insert(server_id, triggers.iter().cloned().collect());
3011 self.completion_triggers.extend(triggers);
3012 }
3013 self.text.lamport_clock.observe(lamport_timestamp);
3014 }
3015 Operation::UpdateLineEnding {
3016 line_ending,
3017 lamport_timestamp,
3018 } => {
3019 self.text.set_line_ending(line_ending);
3020 self.text.lamport_clock.observe(lamport_timestamp);
3021 }
3022 }
3023 }
3024
3025 fn apply_diagnostic_update(
3026 &mut self,
3027 server_id: LanguageServerId,
3028 diagnostics: DiagnosticSet,
3029 lamport_timestamp: clock::Lamport,
3030 cx: &mut Context<Self>,
3031 ) {
3032 if lamport_timestamp > self.diagnostics_timestamp {
3033 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
3034 if diagnostics.is_empty() {
3035 if let Ok(ix) = ix {
3036 self.diagnostics.remove(ix);
3037 }
3038 } else {
3039 match ix {
3040 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
3041 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
3042 };
3043 }
3044 self.diagnostics_timestamp = lamport_timestamp;
3045 self.non_text_state_update_count += 1;
3046 self.text.lamport_clock.observe(lamport_timestamp);
3047 cx.notify();
3048 cx.emit(BufferEvent::DiagnosticsUpdated);
3049 }
3050 }
3051
3052 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
3053 self.was_changed();
3054 cx.emit(BufferEvent::Operation {
3055 operation,
3056 is_local,
3057 });
3058 }
3059
3060 /// Removes the selections for a given peer.
3061 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
3062 self.remote_selections.remove(&replica_id);
3063 cx.notify();
3064 }
3065
3066 /// Undoes the most recent transaction.
3067 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3068 let was_dirty = self.is_dirty();
3069 let old_version = self.version.clone();
3070
3071 if let Some((transaction_id, operation)) = self.text.undo() {
3072 self.send_operation(Operation::Buffer(operation), true, cx);
3073 self.did_edit(&old_version, was_dirty, cx);
3074 Some(transaction_id)
3075 } else {
3076 None
3077 }
3078 }
3079
3080 /// Manually undoes a specific transaction in the buffer's undo history.
3081 pub fn undo_transaction(
3082 &mut self,
3083 transaction_id: TransactionId,
3084 cx: &mut Context<Self>,
3085 ) -> bool {
3086 let was_dirty = self.is_dirty();
3087 let old_version = self.version.clone();
3088 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3089 self.send_operation(Operation::Buffer(operation), true, cx);
3090 self.did_edit(&old_version, was_dirty, cx);
3091 true
3092 } else {
3093 false
3094 }
3095 }
3096
3097 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3098 pub fn undo_to_transaction(
3099 &mut self,
3100 transaction_id: TransactionId,
3101 cx: &mut Context<Self>,
3102 ) -> bool {
3103 let was_dirty = self.is_dirty();
3104 let old_version = self.version.clone();
3105
3106 let operations = self.text.undo_to_transaction(transaction_id);
3107 let undone = !operations.is_empty();
3108 for operation in operations {
3109 self.send_operation(Operation::Buffer(operation), true, cx);
3110 }
3111 if undone {
3112 self.did_edit(&old_version, was_dirty, cx)
3113 }
3114 undone
3115 }
3116
3117 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3118 let was_dirty = self.is_dirty();
3119 let operation = self.text.undo_operations(counts);
3120 let old_version = self.version.clone();
3121 self.send_operation(Operation::Buffer(operation), true, cx);
3122 self.did_edit(&old_version, was_dirty, cx);
3123 }
3124
3125     /// Redoes the most recently undone transaction.
3126 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3127 let was_dirty = self.is_dirty();
3128 let old_version = self.version.clone();
3129
3130 if let Some((transaction_id, operation)) = self.text.redo() {
3131 self.send_operation(Operation::Buffer(operation), true, cx);
3132 self.did_edit(&old_version, was_dirty, cx);
3133 Some(transaction_id)
3134 } else {
3135 None
3136 }
3137 }
3138
3139     /// Manually redoes all changes until a given transaction in the buffer's redo history.
3140 pub fn redo_to_transaction(
3141 &mut self,
3142 transaction_id: TransactionId,
3143 cx: &mut Context<Self>,
3144 ) -> bool {
3145 let was_dirty = self.is_dirty();
3146 let old_version = self.version.clone();
3147
3148 let operations = self.text.redo_to_transaction(transaction_id);
3149 let redone = !operations.is_empty();
3150 for operation in operations {
3151 self.send_operation(Operation::Buffer(operation), true, cx);
3152 }
3153 if redone {
3154 self.did_edit(&old_version, was_dirty, cx)
3155 }
3156 redone
3157 }
3158
3159 /// Override current completion triggers with the user-provided completion triggers.
3160 pub fn set_completion_triggers(
3161 &mut self,
3162 server_id: LanguageServerId,
3163 triggers: BTreeSet<String>,
3164 cx: &mut Context<Self>,
3165 ) {
3166 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3167 if triggers.is_empty() {
3168 self.completion_triggers_per_language_server
3169 .remove(&server_id);
3170 self.completion_triggers = self
3171 .completion_triggers_per_language_server
3172 .values()
3173 .flat_map(|triggers| triggers.iter().cloned())
3174 .collect();
3175 } else {
3176 self.completion_triggers_per_language_server
3177 .insert(server_id, triggers.clone());
3178 self.completion_triggers.extend(triggers.iter().cloned());
3179 }
3180 self.send_operation(
3181 Operation::UpdateCompletionTriggers {
3182 triggers: triggers.into_iter().collect(),
3183 lamport_timestamp: self.completion_triggers_timestamp,
3184 server_id,
3185 },
3186 true,
3187 cx,
3188 );
3189 cx.notify();
3190 }
3191
3192 /// Returns a list of strings which trigger a completion menu for this language.
3193     /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3194 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3195 &self.completion_triggers
3196 }
3197
3198 /// Call this directly after performing edits to prevent the preview tab
3199 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3200 /// to return false until there are additional edits.
3201 pub fn refresh_preview(&mut self) {
3202 self.preview_version = self.version.clone();
3203 }
3204
3205 /// Whether we should preserve the preview status of a tab containing this buffer.
3206 pub fn preserve_preview(&self) -> bool {
3207 !self.has_edits_since(&self.preview_version)
3208 }
3209}
3210
3211#[doc(hidden)]
3212#[cfg(any(test, feature = "test-support"))]
3213impl Buffer {
3214 pub fn edit_via_marked_text(
3215 &mut self,
3216 marked_string: &str,
3217 autoindent_mode: Option<AutoindentMode>,
3218 cx: &mut Context<Self>,
3219 ) {
3220 let edits = self.edits_for_marked_text(marked_string);
3221 self.edit(edits, autoindent_mode, cx);
3222 }
3223
3224 pub fn set_group_interval(&mut self, group_interval: Duration) {
3225 self.text.set_group_interval(group_interval);
3226 }
3227
3228 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3229 where
3230 T: rand::Rng,
3231 {
3232 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3233 let mut last_end = None;
3234 for _ in 0..old_range_count {
3235 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3236 break;
3237 }
3238
3239 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3240 let mut range = self.random_byte_range(new_start, rng);
3241 if rng.random_bool(0.2) {
3242 mem::swap(&mut range.start, &mut range.end);
3243 }
3244 last_end = Some(range.end);
3245
3246 let new_text_len = rng.random_range(0..10);
3247 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3248 new_text = new_text.to_uppercase();
3249
3250 edits.push((range, new_text));
3251 }
3252 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3253 self.edit(edits, None, cx);
3254 }
3255
3256 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3257 let was_dirty = self.is_dirty();
3258 let old_version = self.version.clone();
3259
3260 let ops = self.text.randomly_undo_redo(rng);
3261 if !ops.is_empty() {
3262 for op in ops {
3263 self.send_operation(Operation::Buffer(op), true, cx);
3264 self.did_edit(&old_version, was_dirty, cx);
3265 }
3266 }
3267 }
3268}
3269
3270impl EventEmitter<BufferEvent> for Buffer {}
3271
3272impl Deref for Buffer {
3273 type Target = TextBuffer;
3274
3275 fn deref(&self) -> &Self::Target {
3276 &self.text
3277 }
3278}
3279
3280impl BufferSnapshot {
3281 /// Returns [`IndentSize`] for a given line that respects user settings and
3282 /// language preferences.
3283 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3284 indent_size_for_line(self, row)
3285 }
3286
3287 /// Returns [`IndentSize`] for a given position that respects user settings
3288 /// and language preferences.
3289 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3290 let settings = self.settings_at(position, cx);
3291 if settings.hard_tabs {
3292 IndentSize::tab()
3293 } else {
3294 IndentSize::spaces(settings.tab_size.get())
3295 }
3296 }
3297
3298 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3299 /// is passed in as `single_indent_size`.
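    ///
    /// A hedged, illustrative sketch (not part of the original source): suggest
    /// indents for rows 2 through 4 using a four-space indentation unit. Assumes a
    /// `snapshot: BufferSnapshot`.
    ///
    /// ```ignore
    /// let suggestions = snapshot.suggested_indents(2..=4u32, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```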
3300 pub fn suggested_indents(
3301 &self,
3302 rows: impl Iterator<Item = u32>,
3303 single_indent_size: IndentSize,
3304 ) -> BTreeMap<u32, IndentSize> {
3305 let mut result = BTreeMap::new();
3306
3307 for row_range in contiguous_ranges(rows, 10) {
3308 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3309 Some(suggestions) => suggestions,
3310 _ => break,
3311 };
3312
3313 for (row, suggestion) in row_range.zip(suggestions) {
3314 let indent_size = if let Some(suggestion) = suggestion {
3315 result
3316 .get(&suggestion.basis_row)
3317 .copied()
3318 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3319 .with_delta(suggestion.delta, single_indent_size)
3320 } else {
3321 self.indent_size_for_line(row)
3322 };
3323
3324 result.insert(row, indent_size);
3325 }
3326 }
3327
3328 result
3329 }
3330
3331 fn suggest_autoindents(
3332 &self,
3333 row_range: Range<u32>,
3334 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3335 let config = &self.language.as_ref()?.config;
3336 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3337
3338 #[derive(Debug, Clone)]
3339 struct StartPosition {
3340 start: Point,
3341 suffix: SharedString,
3342 language: Arc<Language>,
3343 }
3344
3345 // Find the suggested indentation ranges based on the syntax tree.
3346 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3347 let end = Point::new(row_range.end, 0);
3348 let range = (start..end).to_offset(&self.text);
3349 let mut matches = self.syntax.matches_with_options(
3350 range.clone(),
3351 &self.text,
3352 TreeSitterOptions {
3353 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3354 max_start_depth: None,
3355 },
3356 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3357 );
3358 let indent_configs = matches
3359 .grammars()
3360 .iter()
3361 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3362 .collect::<Vec<_>>();
3363
3364 let mut indent_ranges = Vec::<Range<Point>>::new();
3365 let mut start_positions = Vec::<StartPosition>::new();
3366 let mut outdent_positions = Vec::<Point>::new();
3367 while let Some(mat) = matches.peek() {
3368 let mut start: Option<Point> = None;
3369 let mut end: Option<Point> = None;
3370
3371 let config = indent_configs[mat.grammar_index];
3372 for capture in mat.captures {
3373 if capture.index == config.indent_capture_ix {
3374 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3375 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3376 } else if Some(capture.index) == config.start_capture_ix {
3377 start = Some(Point::from_ts_point(capture.node.end_position()));
3378 } else if Some(capture.index) == config.end_capture_ix {
3379 end = Some(Point::from_ts_point(capture.node.start_position()));
3380 } else if Some(capture.index) == config.outdent_capture_ix {
3381 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3382 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3383 start_positions.push(StartPosition {
3384 start: Point::from_ts_point(capture.node.start_position()),
3385 suffix: suffix.clone(),
3386 language: mat.language.clone(),
3387 });
3388 }
3389 }
3390
3391 matches.advance();
3392 if let Some((start, end)) = start.zip(end) {
3393 if start.row == end.row {
3394 continue;
3395 }
3396 let range = start..end;
3397 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3398 Err(ix) => indent_ranges.insert(ix, range),
3399 Ok(ix) => {
3400 let prev_range = &mut indent_ranges[ix];
3401 prev_range.end = prev_range.end.max(range.end);
3402 }
3403 }
3404 }
3405 }
3406
3407 let mut error_ranges = Vec::<Range<Point>>::new();
3408 let mut matches = self
3409 .syntax
3410 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3411 while let Some(mat) = matches.peek() {
3412 let node = mat.captures[0].node;
3413 let start = Point::from_ts_point(node.start_position());
3414 let end = Point::from_ts_point(node.end_position());
3415 let range = start..end;
3416 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3417 Ok(ix) | Err(ix) => ix,
3418 };
3419 let mut end_ix = ix;
3420 while let Some(existing_range) = error_ranges.get(end_ix) {
3421 if existing_range.end < end {
3422 end_ix += 1;
3423 } else {
3424 break;
3425 }
3426 }
3427 error_ranges.splice(ix..end_ix, [range]);
3428 matches.advance();
3429 }
3430
3431 outdent_positions.sort();
3432 for outdent_position in outdent_positions {
3433 // find the innermost indent range containing this outdent_position
3434 // set its end to the outdent position
3435 if let Some(range_to_truncate) = indent_ranges
3436 .iter_mut()
3437 .rfind(|indent_range| indent_range.contains(&outdent_position))
3438 {
3439 range_to_truncate.end = outdent_position;
3440 }
3441 }
3442
3443 start_positions.sort_by_key(|b| b.start);
3444
3445         // Find the suggested indentation increases and decreases based on regexes.
3446 let mut regex_outdent_map = HashMap::default();
3447 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3448 let mut start_positions_iter = start_positions.iter().peekable();
3449
3450 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3451 self.for_each_line(
3452 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3453 ..Point::new(row_range.end, 0),
3454 |row, line| {
3455 let indent_len = self.indent_size_for_line(row).len;
3456 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3457 let row_language_config = row_language
3458 .as_ref()
3459 .map(|lang| lang.config())
3460 .unwrap_or(config);
3461
3462 if row_language_config
3463 .decrease_indent_pattern
3464 .as_ref()
3465 .is_some_and(|regex| regex.is_match(line))
3466 {
3467 indent_change_rows.push((row, Ordering::Less));
3468 }
3469 if row_language_config
3470 .increase_indent_pattern
3471 .as_ref()
3472 .is_some_and(|regex| regex.is_match(line))
3473 {
3474 indent_change_rows.push((row + 1, Ordering::Greater));
3475 }
3476 while let Some(pos) = start_positions_iter.peek() {
3477 if pos.start.row < row {
3478 let pos = start_positions_iter.next().unwrap().clone();
3479 last_seen_suffix
3480 .entry(pos.suffix.to_string())
3481 .or_default()
3482 .push(pos);
3483 } else {
3484 break;
3485 }
3486 }
3487 for rule in &row_language_config.decrease_indent_patterns {
3488 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3489 let row_start_column = self.indent_size_for_line(row).len;
3490 let basis_row = rule
3491 .valid_after
3492 .iter()
3493 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3494 .flatten()
3495 .filter(|pos| {
3496 row_language
3497 .as_ref()
3498 .or(self.language.as_ref())
3499 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3500 })
3501 .filter(|pos| pos.start.column <= row_start_column)
3502 .max_by_key(|pos| pos.start.row);
3503 if let Some(outdent_to) = basis_row {
3504 regex_outdent_map.insert(row, outdent_to.start.row);
3505 }
3506 break;
3507 }
3508 }
3509 },
3510 );
3511
3512 let mut indent_changes = indent_change_rows.into_iter().peekable();
3513 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3514 prev_non_blank_row.unwrap_or(0)
3515 } else {
3516 row_range.start.saturating_sub(1)
3517 };
3518
3519 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
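// For each requested row, fold the tree-sitter indent ranges, regex-based rules, and
// error ranges computed above into a single suggestion expressed relative to a basis row.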
3520 Some(row_range.map(move |row| {
3521 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3522
3523 let mut indent_from_prev_row = false;
3524 let mut outdent_from_prev_row = false;
3525 let mut outdent_to_row = u32::MAX;
3526 let mut from_regex = false;
3527
3528 while let Some((indent_row, delta)) = indent_changes.peek() {
3529 match indent_row.cmp(&row) {
3530 Ordering::Equal => match delta {
3531 Ordering::Less => {
3532 from_regex = true;
3533 outdent_from_prev_row = true
3534 }
3535 Ordering::Greater => {
3536 indent_from_prev_row = true;
3537 from_regex = true
3538 }
3539 _ => {}
3540 },
3541
3542 Ordering::Greater => break,
3543 Ordering::Less => {}
3544 }
3545
3546 indent_changes.next();
3547 }
3548
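// Indent when a range that opened on the previous row still extends past this row's start;
// outdent back to a range's start row when the range ends between the previous row and this one.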
3549 for range in &indent_ranges {
3550 if range.start.row >= row {
3551 break;
3552 }
3553 if range.start.row == prev_row && range.end > row_start {
3554 indent_from_prev_row = true;
3555 }
3556 if range.end > prev_row_start && range.end <= row_start {
3557 outdent_to_row = outdent_to_row.min(range.start.row);
3558 }
3559 }
3560
3561 if let Some(basis_row) = regex_outdent_map.get(&row) {
3562 indent_from_prev_row = false;
3563 outdent_to_row = *basis_row;
3564 from_regex = true;
3565 }
3566
3567 let within_error = error_ranges
3568 .iter()
3569 .any(|e| e.start.row < row && e.end > row_start);
3570
3571 let suggestion = if outdent_to_row == prev_row
3572 || (outdent_from_prev_row && indent_from_prev_row)
3573 {
3574 Some(IndentSuggestion {
3575 basis_row: prev_row,
3576 delta: Ordering::Equal,
3577 within_error: within_error && !from_regex,
3578 })
3579 } else if indent_from_prev_row {
3580 Some(IndentSuggestion {
3581 basis_row: prev_row,
3582 delta: Ordering::Greater,
3583 within_error: within_error && !from_regex,
3584 })
3585 } else if outdent_to_row < prev_row {
3586 Some(IndentSuggestion {
3587 basis_row: outdent_to_row,
3588 delta: Ordering::Equal,
3589 within_error: within_error && !from_regex,
3590 })
3591 } else if outdent_from_prev_row {
3592 Some(IndentSuggestion {
3593 basis_row: prev_row,
3594 delta: Ordering::Less,
3595 within_error: within_error && !from_regex,
3596 })
3597 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3598 {
3599 Some(IndentSuggestion {
3600 basis_row: prev_row,
3601 delta: Ordering::Equal,
3602 within_error: within_error && !from_regex,
3603 })
3604 } else {
3605 None
3606 };
3607
3608 prev_row = row;
3609 prev_row_start = row_start;
3610 suggestion
3611 }))
3612 }
3613
3614 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3615 while row > 0 {
3616 row -= 1;
3617 if !self.is_line_blank(row) {
3618 return Some(row);
3619 }
3620 }
3621 None
3622 }
3623
3624 #[ztracing::instrument(skip_all)]
3625 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3626 let captures = self.syntax.captures(range, &self.text, |grammar| {
3627 grammar
3628 .highlights_config
3629 .as_ref()
3630 .map(|config| &config.query)
3631 });
3632 let highlight_maps = captures
3633 .grammars()
3634 .iter()
3635 .map(|grammar| grammar.highlight_map())
3636 .collect();
3637 (captures, highlight_maps)
3638 }
3639
3640 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3641 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3642 /// returned in chunks where each chunk has a single syntax highlighting style and
3643 /// diagnostic status.
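/// A rough usage sketch (hedged: `snapshot` and `theme` stand in for a [`BufferSnapshot`]
/// and a [`SyntaxTheme`] in scope; they are not defined here):
///
/// ```ignore
/// for chunk in snapshot.chunks(0..snapshot.len(), true) {
///     let _style = chunk.syntax_highlight_id.and_then(|id| id.style(&theme));
/// }
/// ```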
3644 #[ztracing::instrument(skip_all)]
3645 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3646 let range = range.start.to_offset(self)..range.end.to_offset(self);
3647
3648 let mut syntax = None;
3649 if language_aware {
3650 syntax = Some(self.get_highlights(range.clone()));
3651 }
3652 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3653 let diagnostics = language_aware;
3654 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3655 }
3656
3657 pub fn highlighted_text_for_range<T: ToOffset>(
3658 &self,
3659 range: Range<T>,
3660 override_style: Option<HighlightStyle>,
3661 syntax_theme: &SyntaxTheme,
3662 ) -> HighlightedText {
3663 HighlightedText::from_buffer_range(
3664 range,
3665 &self.text,
3666 &self.syntax,
3667 override_style,
3668 syntax_theme,
3669 )
3670 }
3671
3672 /// Invokes the given callback for each line of text in the given range of the buffer.
3673 /// Uses a callback with a reused string buffer to avoid allocating a new string for each line.
3674 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3675 let mut line = String::new();
3676 let mut row = range.start.row;
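// A trailing newline chunk is chained on so that the final line is flushed
// even when the range does not end with a newline.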
3677 for chunk in self
3678 .as_rope()
3679 .chunks_in_range(range.to_offset(self))
3680 .chain(["\n"])
3681 {
3682 for (newline_ix, text) in chunk.split('\n').enumerate() {
3683 if newline_ix > 0 {
3684 callback(row, &line);
3685 row += 1;
3686 line.clear();
3687 }
3688 line.push_str(text);
3689 }
3690 }
3691 }
3692
3693 /// Iterates over every [`SyntaxLayer`] in the buffer.
3694 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3695 self.syntax_layers_for_range(0..self.len(), true)
3696 }
3697
3698 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3699 let offset = position.to_offset(self);
3700 self.syntax_layers_for_range(offset..offset, false)
3701 .filter(|l| {
3702 if let Some(ranges) = l.included_sub_ranges {
3703 ranges.iter().any(|range| {
3704 let start = range.start.to_offset(self);
3705 start <= offset && {
3706 let end = range.end.to_offset(self);
3707 offset < end
3708 }
3709 })
3710 } else {
3711 l.node().start_byte() <= offset && l.node().end_byte() > offset
3712 }
3713 })
3714 .last()
3715 }
3716
3717 pub fn syntax_layers_for_range<D: ToOffset>(
3718 &self,
3719 range: Range<D>,
3720 include_hidden: bool,
3721 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3722 self.syntax
3723 .layers_for_range(range, &self.text, include_hidden)
3724 }
3725
3726 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3727 &self,
3728 range: Range<D>,
3729 ) -> Option<SyntaxLayer<'_>> {
3730 let range = range.to_offset(self);
3731 self.syntax
3732 .layers_for_range(range, &self.text, false)
3733 .max_by(|a, b| {
3734 if a.depth != b.depth {
3735 a.depth.cmp(&b.depth)
3736 } else if a.offset.0 != b.offset.0 {
3737 a.offset.0.cmp(&b.offset.0)
3738 } else {
3739 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3740 }
3741 })
3742 }
3743
3744 /// Returns the [`ModelineSettings`].
3745 pub fn modeline(&self) -> Option<&Arc<ModelineSettings>> {
3746 self.modeline.as_ref()
3747 }
3748
3749 /// Returns the main [`Language`].
3750 pub fn language(&self) -> Option<&Arc<Language>> {
3751 self.language.as_ref()
3752 }
3753
3754 /// Returns the [`Language`] at the given location.
3755 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3756 self.syntax_layer_at(position)
3757 .map(|info| info.language)
3758 .or(self.language.as_ref())
3759 }
3760
3761 /// Returns the settings for the language at the given location.
3762 pub fn settings_at<'a, D: ToOffset>(
3763 &'a self,
3764 position: D,
3765 cx: &'a App,
3766 ) -> Cow<'a, LanguageSettings> {
3767 LanguageSettings::for_buffer_snapshot(self, Some(position.to_offset(self)), cx)
3768 }
3769
3770 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3771 CharClassifier::new(self.language_scope_at(point))
3772 }
3773
3774 /// Returns the [`LanguageScope`] at the given location.
3775 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3776 let offset = position.to_offset(self);
3777 let mut scope = None;
3778 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3779
3780 // Use the layer that has the smallest node intersecting the given point.
3781 for layer in self
3782 .syntax
3783 .layers_for_range(offset..offset, &self.text, false)
3784 {
3785 let mut cursor = layer.node().walk();
3786
3787 let mut range = None;
3788 loop {
3789 let child_range = cursor.node().byte_range();
3790 if !child_range.contains(&offset) {
3791 break;
3792 }
3793
3794 range = Some(child_range);
3795 if cursor.goto_first_child_for_byte(offset).is_none() {
3796 break;
3797 }
3798 }
3799
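// Prefer layers at greater depth; among layers at the same depth, prefer the smaller node.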
3800 if let Some(range) = range
3801 && smallest_range_and_depth.as_ref().is_none_or(
3802 |(smallest_range, smallest_range_depth)| {
3803 if layer.depth > *smallest_range_depth {
3804 true
3805 } else if layer.depth == *smallest_range_depth {
3806 range.len() < smallest_range.len()
3807 } else {
3808 false
3809 }
3810 },
3811 )
3812 {
3813 smallest_range_and_depth = Some((range, layer.depth));
3814 scope = Some(LanguageScope {
3815 language: layer.language.clone(),
3816 override_id: layer.override_id(offset, &self.text),
3817 });
3818 }
3819 }
3820
3821 scope.or_else(|| {
3822 self.language.clone().map(|language| LanguageScope {
3823 language,
3824 override_id: None,
3825 })
3826 })
3827 }
3828
3829 /// Returns a tuple of the range and character kind of the word
3830 /// surrounding the given position.
3831 pub fn surrounding_word<T: ToOffset>(
3832 &self,
3833 start: T,
3834 scope_context: Option<CharScopeContext>,
3835 ) -> (Range<usize>, Option<CharKind>) {
3836 let mut start = start.to_offset(self);
3837 let mut end = start;
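// Scan at most 128 characters in each direction to bound the cost of this lookup.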
3838 let mut next_chars = self.chars_at(start).take(128).peekable();
3839 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3840
3841 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3842 let word_kind = cmp::max(
3843 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3844 next_chars.peek().copied().map(|c| classifier.kind(c)),
3845 );
3846
3847 for ch in prev_chars {
3848 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3849 start -= ch.len_utf8();
3850 } else {
3851 break;
3852 }
3853 }
3854
3855 for ch in next_chars {
3856 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3857 end += ch.len_utf8();
3858 } else {
3859 break;
3860 }
3861 }
3862
3863 (start..end, word_kind)
3864 }
3865
3866 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3867 /// range. When `require_larger` is true, the node found must be larger than the query range.
3868 ///
3869 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3870 /// be moved to the root of the tree.
3871 fn goto_node_enclosing_range(
3872 cursor: &mut tree_sitter::TreeCursor,
3873 query_range: &Range<usize>,
3874 require_larger: bool,
3875 ) -> bool {
3876 let mut ascending = false;
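// Descend from the current node toward the range; once a node no longer encloses it,
// switch to ascending and stop at the first enclosing ancestor.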
3877 loop {
3878 let mut range = cursor.node().byte_range();
3879 if query_range.is_empty() {
3880 // When the query range is empty and the current node starts after it, move to the
3881 // previous sibling to find the containing node.
3882 if range.start > query_range.start {
3883 cursor.goto_previous_sibling();
3884 range = cursor.node().byte_range();
3885 }
3886 } else {
3887 // When the query range is non-empty and the current node ends exactly at the start,
3888 // move to the next sibling to find a node that extends beyond the start.
3889 if range.end == query_range.start {
3890 cursor.goto_next_sibling();
3891 range = cursor.node().byte_range();
3892 }
3893 }
3894
3895 let encloses = range.contains_inclusive(query_range)
3896 && (!require_larger || range.len() > query_range.len());
3897 if !encloses {
3898 ascending = true;
3899 if !cursor.goto_parent() {
3900 return false;
3901 }
3902 continue;
3903 } else if ascending {
3904 return true;
3905 }
3906
3907 // Descend into the current node.
3908 if cursor
3909 .goto_first_child_for_byte(query_range.start)
3910 .is_none()
3911 {
3912 return true;
3913 }
3914 }
3915 }
3916
3917 pub fn syntax_ancestor<'a, T: ToOffset>(
3918 &'a self,
3919 range: Range<T>,
3920 ) -> Option<tree_sitter::Node<'a>> {
3921 let range = range.start.to_offset(self)..range.end.to_offset(self);
3922 let mut result: Option<tree_sitter::Node<'a>> = None;
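// Check every syntax layer intersecting the range and keep the smallest enclosing node
// found across all layers.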
3923 for layer in self
3924 .syntax
3925 .layers_for_range(range.clone(), &self.text, true)
3926 {
3927 let mut cursor = layer.node().walk();
3928
3929 // Find the node that both contains the range and is larger than it.
3930 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3931 continue;
3932 }
3933
3934 let left_node = cursor.node();
3935 let mut layer_result = left_node;
3936
3937 // For an empty range, try to find another node immediately to the right of the range.
3938 if left_node.end_byte() == range.start {
3939 let mut right_node = None;
3940 while !cursor.goto_next_sibling() {
3941 if !cursor.goto_parent() {
3942 break;
3943 }
3944 }
3945
3946 while cursor.node().start_byte() == range.start {
3947 right_node = Some(cursor.node());
3948 if !cursor.goto_first_child() {
3949 break;
3950 }
3951 }
3952
3953 // If there is a candidate node on both sides of the (empty) range, then
3954 // decide between the two by favoring a named node over an anonymous token.
3955 // If both nodes are the same in that regard, favor the right one.
3956 if let Some(right_node) = right_node
3957 && (right_node.is_named() || !left_node.is_named())
3958 {
3959 layer_result = right_node;
3960 }
3961 }
3962
3963 if let Some(previous_result) = &result
3964 && previous_result.byte_range().len() < layer_result.byte_range().len()
3965 {
3966 continue;
3967 }
3968 result = Some(layer_result);
3969 }
3970
3971 result
3972 }
3973
3974 /// Find the previous sibling syntax node at the given range.
3975 ///
3976 /// This function locates the syntax node that precedes the node containing
3977 /// the given range. It searches hierarchically by:
3978 /// 1. Finding the node that contains the given range
3979 /// 2. Looking for the previous sibling at the same tree level
3980 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3981 ///
3982 /// Returns `None` if there is no previous sibling at any ancestor level.
3983 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3984 &'a self,
3985 range: Range<T>,
3986 ) -> Option<tree_sitter::Node<'a>> {
3987 let range = range.start.to_offset(self)..range.end.to_offset(self);
3988 let mut result: Option<tree_sitter::Node<'a>> = None;
3989
3990 for layer in self
3991 .syntax
3992 .layers_for_range(range.clone(), &self.text, true)
3993 {
3994 let mut cursor = layer.node().walk();
3995
3996 // Find the node that contains the range
3997 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3998 continue;
3999 }
4000
4001 // Look for the previous sibling, moving up ancestor levels if needed
4002 loop {
4003 if cursor.goto_previous_sibling() {
4004 let layer_result = cursor.node();
4005
4006 if let Some(previous_result) = &result {
4007 if previous_result.byte_range().end < layer_result.byte_range().end {
4008 continue;
4009 }
4010 }
4011 result = Some(layer_result);
4012 break;
4013 }
4014
4015 // No sibling found at this level, try moving up to parent
4016 if !cursor.goto_parent() {
4017 break;
4018 }
4019 }
4020 }
4021
4022 result
4023 }
4024
4025 /// Find the next sibling syntax node at the given range.
4026 ///
4027 /// This function locates the syntax node that follows the node containing
4028 /// the given range. It searches hierarchically by:
4029 /// 1. Finding the node that contains the given range
4030 /// 2. Looking for the next sibling at the same tree level
4031 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
4032 ///
4033 /// Returns `None` if there is no next sibling at any ancestor level.
4034 pub fn syntax_next_sibling<'a, T: ToOffset>(
4035 &'a self,
4036 range: Range<T>,
4037 ) -> Option<tree_sitter::Node<'a>> {
4038 let range = range.start.to_offset(self)..range.end.to_offset(self);
4039 let mut result: Option<tree_sitter::Node<'a>> = None;
4040
4041 for layer in self
4042 .syntax
4043 .layers_for_range(range.clone(), &self.text, true)
4044 {
4045 let mut cursor = layer.node().walk();
4046
4047 // Find the node that contains the range
4048 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
4049 continue;
4050 }
4051
4052 // Look for the next sibling, moving up ancestor levels if needed
4053 loop {
4054 if cursor.goto_next_sibling() {
4055 let layer_result = cursor.node();
4056
4057 if let Some(previous_result) = &result {
4058 if previous_result.byte_range().start > layer_result.byte_range().start {
4059 continue;
4060 }
4061 }
4062 result = Some(layer_result);
4063 break;
4064 }
4065
4066 // No sibling found at this level, try moving up to parent
4067 if !cursor.goto_parent() {
4068 break;
4069 }
4070 }
4071 }
4072
4073 result
4074 }
4075
4076 /// Returns the root syntax node within the given row
4077 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
4078 let start_offset = position.to_offset(self);
4079
4080 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
4081
4082 let layer = self
4083 .syntax
4084 .layers_for_range(start_offset..start_offset, &self.text, true)
4085 .next()?;
4086
4087 let mut cursor = layer.node().walk();
4088
4089 // Descend to the first leaf that touches the start of the range.
4090 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4091 if cursor.node().end_byte() == start_offset {
4092 cursor.goto_next_sibling();
4093 }
4094 }
4095
4096 // Ascend to the root node within the same row.
4097 while cursor.goto_parent() {
4098 if cursor.node().start_position().row != row {
4099 break;
4100 }
4101 }
4102
4103 Some(cursor.node())
4104 }
4105
4106 /// Returns the outline for the buffer.
4107 ///
4108 /// This method allows passing an optional [`SyntaxTheme`] to
4109 /// syntax-highlight the returned symbols.
4110 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4111 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4112 }
4113
4114 /// Returns all the symbols that contain the given position.
4115 ///
4116 /// This method allows passing an optional [`SyntaxTheme`] to
4117 /// syntax-highlight the returned symbols.
4118 pub fn symbols_containing<T: ToOffset>(
4119 &self,
4120 position: T,
4121 theme: Option<&SyntaxTheme>,
4122 ) -> Vec<OutlineItem<Anchor>> {
4123 let position = position.to_offset(self);
4124 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4125 let end = self.clip_offset(position + 1, Bias::Right);
4126 let mut items = self.outline_items_containing(start..end, false, theme);
4127 let mut prev_depth = None;
4128 items.retain(|item| {
4129 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4130 prev_depth = Some(item.depth);
4131 result
4132 });
4133 items
4134 }
4135
4136 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4137 let range = range.to_offset(self);
4138 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4139 grammar.outline_config.as_ref().map(|c| &c.query)
4140 });
4141 let configs = matches
4142 .grammars()
4143 .iter()
4144 .map(|g| g.outline_config.as_ref().unwrap())
4145 .collect::<Vec<_>>();
4146
4147 while let Some(mat) = matches.peek() {
4148 let config = &configs[mat.grammar_index];
4149 let containing_item_node = maybe!({
4150 let item_node = mat.captures.iter().find_map(|cap| {
4151 if cap.index == config.item_capture_ix {
4152 Some(cap.node)
4153 } else {
4154 None
4155 }
4156 })?;
4157
4158 let item_byte_range = item_node.byte_range();
4159 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4160 None
4161 } else {
4162 Some(item_node)
4163 }
4164 });
4165
4166 if let Some(item_node) = containing_item_node {
4167 return Some(
4168 Point::from_ts_point(item_node.start_position())
4169 ..Point::from_ts_point(item_node.end_position()),
4170 );
4171 }
4172
4173 matches.advance();
4174 }
4175 None
4176 }
4177
4178 pub fn outline_items_containing<T: ToOffset>(
4179 &self,
4180 range: Range<T>,
4181 include_extra_context: bool,
4182 theme: Option<&SyntaxTheme>,
4183 ) -> Vec<OutlineItem<Anchor>> {
4184 self.outline_items_containing_internal(
4185 range,
4186 include_extra_context,
4187 theme,
4188 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4189 )
4190 }
4191
4192 pub fn outline_items_as_points_containing<T: ToOffset>(
4193 &self,
4194 range: Range<T>,
4195 include_extra_context: bool,
4196 theme: Option<&SyntaxTheme>,
4197 ) -> Vec<OutlineItem<Point>> {
4198 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4199 range
4200 })
4201 }
4202
4203 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4204 &self,
4205 range: Range<T>,
4206 include_extra_context: bool,
4207 theme: Option<&SyntaxTheme>,
4208 ) -> Vec<OutlineItem<usize>> {
4209 self.outline_items_containing_internal(
4210 range,
4211 include_extra_context,
4212 theme,
4213 |buffer, range| range.to_offset(buffer),
4214 )
4215 }
4216
4217 fn outline_items_containing_internal<T: ToOffset, U>(
4218 &self,
4219 range: Range<T>,
4220 include_extra_context: bool,
4221 theme: Option<&SyntaxTheme>,
4222 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4223 ) -> Vec<OutlineItem<U>> {
4224 let range = range.to_offset(self);
4225 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4226 grammar.outline_config.as_ref().map(|c| &c.query)
4227 });
4228
4229 let mut items = Vec::new();
4230 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4231 while let Some(mat) = matches.peek() {
4232 let config = matches.grammars()[mat.grammar_index]
4233 .outline_config
4234 .as_ref()
4235 .unwrap();
4236 if let Some(item) =
4237 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4238 {
4239 items.push(item);
4240 } else if let Some(capture) = mat
4241 .captures
4242 .iter()
4243 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4244 {
4245 let capture_range = capture.node.start_position()..capture.node.end_position();
4246 let mut capture_row_range =
4247 capture_range.start.row as u32..capture_range.end.row as u32;
4248 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4249 {
4250 capture_row_range.end -= 1;
4251 }
4252 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4253 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4254 last_row_range.end = capture_row_range.end;
4255 } else {
4256 annotation_row_ranges.push(capture_row_range);
4257 }
4258 } else {
4259 annotation_row_ranges.push(capture_row_range);
4260 }
4261 }
4262 matches.advance();
4263 }
4264
4265 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4266
4267 // Assign depths based on containment relationships and convert to anchors.
4268 let mut item_ends_stack = Vec::<Point>::new();
4269 let mut anchor_items = Vec::new();
4270 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
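// `item_ends_stack` holds the end positions of the items enclosing the current one,
// so its length is the current item's nesting depth.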
4271 for item in items {
4272 while let Some(last_end) = item_ends_stack.last().copied() {
4273 if last_end < item.range.end {
4274 item_ends_stack.pop();
4275 } else {
4276 break;
4277 }
4278 }
4279
4280 let mut annotation_row_range = None;
4281 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4282 let row_preceding_item = item.range.start.row.saturating_sub(1);
4283 if next_annotation_row_range.end < row_preceding_item {
4284 annotation_row_ranges.next();
4285 } else {
4286 if next_annotation_row_range.end == row_preceding_item {
4287 annotation_row_range = Some(next_annotation_row_range.clone());
4288 annotation_row_ranges.next();
4289 }
4290 break;
4291 }
4292 }
4293
4294 anchor_items.push(OutlineItem {
4295 depth: item_ends_stack.len(),
4296 range: range_callback(self, item.range.clone()),
4297 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4298 text: item.text,
4299 highlight_ranges: item.highlight_ranges,
4300 name_ranges: item.name_ranges,
4301 body_range: item.body_range.map(|r| range_callback(self, r)),
4302 annotation_range: annotation_row_range.map(|annotation_range| {
4303 let point_range = Point::new(annotation_range.start, 0)
4304 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4305 range_callback(self, point_range)
4306 }),
4307 });
4308 item_ends_stack.push(item.range.end);
4309 }
4310
4311 anchor_items
4312 }
4313
4314 fn next_outline_item(
4315 &self,
4316 config: &OutlineConfig,
4317 mat: &SyntaxMapMatch,
4318 range: &Range<usize>,
4319 include_extra_context: bool,
4320 theme: Option<&SyntaxTheme>,
4321 ) -> Option<OutlineItem<Point>> {
4322 let item_node = mat.captures.iter().find_map(|cap| {
4323 if cap.index == config.item_capture_ix {
4324 Some(cap.node)
4325 } else {
4326 None
4327 }
4328 })?;
4329
4330 let item_byte_range = item_node.byte_range();
4331 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4332 return None;
4333 }
4334 let item_point_range = Point::from_ts_point(item_node.start_position())
4335 ..Point::from_ts_point(item_node.end_position());
4336
4337 let mut open_point = None;
4338 let mut close_point = None;
4339
4340 let mut buffer_ranges = Vec::new();
4341 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4342 let mut range = node.start_byte()..node.end_byte();
4343 let start = node.start_position();
4344 if node.end_position().row > start.row {
4345 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4346 }
4347
4348 if !range.is_empty() {
4349 buffer_ranges.push((range, node_is_name));
4350 }
4351 };
4352
4353 for capture in mat.captures {
4354 if capture.index == config.name_capture_ix {
4355 add_to_buffer_ranges(capture.node, true);
4356 } else if Some(capture.index) == config.context_capture_ix
4357 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4358 {
4359 add_to_buffer_ranges(capture.node, false);
4360 } else {
4361 if Some(capture.index) == config.open_capture_ix {
4362 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4363 } else if Some(capture.index) == config.close_capture_ix {
4364 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4365 }
4366 }
4367 }
4368
4369 if buffer_ranges.is_empty() {
4370 return None;
4371 }
4372 let source_range_for_text =
4373 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4374
4375 let mut text = String::new();
4376 let mut highlight_ranges = Vec::new();
4377 let mut name_ranges = Vec::new();
4378 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4379 let mut last_buffer_range_end = 0;
4380 for (buffer_range, is_name) in buffer_ranges {
4381 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4382 if space_added {
4383 text.push(' ');
4384 }
4385 let before_append_len = text.len();
4386 let mut offset = buffer_range.start;
4387 chunks.seek(buffer_range.clone());
4388 for mut chunk in chunks.by_ref() {
4389 if chunk.text.len() > buffer_range.end - offset {
4390 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4391 offset = buffer_range.end;
4392 } else {
4393 offset += chunk.text.len();
4394 }
4395 let style = chunk
4396 .syntax_highlight_id
4397 .zip(theme)
4398 .and_then(|(highlight, theme)| highlight.style(theme));
4399 if let Some(style) = style {
4400 let start = text.len();
4401 let end = start + chunk.text.len();
4402 highlight_ranges.push((start..end, style));
4403 }
4404 text.push_str(chunk.text);
4405 if offset >= buffer_range.end {
4406 break;
4407 }
4408 }
4409 if is_name {
4410 let after_append_len = text.len();
4411 let start = if space_added && !name_ranges.is_empty() {
4412 before_append_len - 1
4413 } else {
4414 before_append_len
4415 };
4416 name_ranges.push(start..after_append_len);
4417 }
4418 last_buffer_range_end = buffer_range.end;
4419 }
4420
4421 Some(OutlineItem {
4422 depth: 0, // We'll calculate the depth later
4423 range: item_point_range,
4424 source_range_for_text: source_range_for_text.to_point(self),
4425 text,
4426 highlight_ranges,
4427 name_ranges,
4428 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4429 annotation_range: None,
4430 })
4431 }
4432
4433 pub fn function_body_fold_ranges<T: ToOffset>(
4434 &self,
4435 within: Range<T>,
4436 ) -> impl Iterator<Item = Range<usize>> + '_ {
4437 self.text_object_ranges(within, TreeSitterOptions::default())
4438 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4439 }
4440
4441 /// For each grammar in the language, runs the provided
4442 /// [`tree_sitter::Query`] against the given range.
4443 pub fn matches(
4444 &self,
4445 range: Range<usize>,
4446 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4447 ) -> SyntaxMapMatches<'_> {
4448 self.syntax.matches(range, self, query)
4449 }
4450
4451 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
4452 /// Hence, it may return more bracket pairs than the range itself contains.
4453 ///
4454 /// Chunks present in `known_chunks` are skipped.
4455 /// The resulting bracket match collections are not ordered.
4456 pub fn fetch_bracket_ranges(
4457 &self,
4458 range: Range<usize>,
4459 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4460 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4461 let mut all_bracket_matches = HashMap::default();
4462
4463 for chunk in self
4464 .tree_sitter_data
4465 .chunks
4466 .applicable_chunks(&[range.to_point(self)])
4467 {
4468 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4469 continue;
4470 }
4471 let chunk_range = chunk.anchor_range();
4472 let chunk_range = chunk_range.to_offset(&self);
4473
4474 if let Some(cached_brackets) =
4475 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4476 {
4477 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4478 continue;
4479 }
4480
4481 let mut all_brackets: Vec<(BracketMatch<usize>, bool)> = Vec::new();
4482 let mut opens = Vec::new();
4483 let mut color_pairs = Vec::new();
4484
4485 let mut matches = self.syntax.matches_with_options(
4486 chunk_range.clone(),
4487 &self.text,
4488 TreeSitterOptions {
4489 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4490 max_start_depth: None,
4491 },
4492 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4493 );
4494 let configs = matches
4495 .grammars()
4496 .iter()
4497 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4498 .collect::<Vec<_>>();
4499
4500 // Group matches by open range so we can either trust grammar output
4501 // or repair it by picking a single closest close per open.
4502 let mut open_to_close_ranges = BTreeMap::new();
4503 while let Some(mat) = matches.peek() {
4504 let mut open = None;
4505 let mut close = None;
4506 let syntax_layer_depth = mat.depth;
4507 let config = configs[mat.grammar_index];
4508 let pattern = &config.patterns[mat.pattern_index];
4509 for capture in mat.captures {
4510 if capture.index == config.open_capture_ix {
4511 open = Some(capture.node.byte_range());
4512 } else if capture.index == config.close_capture_ix {
4513 close = Some(capture.node.byte_range());
4514 }
4515 }
4516
4517 matches.advance();
4518
4519 let Some((open_range, close_range)) = open.zip(close) else {
4520 continue;
4521 };
4522
4523 let bracket_range = open_range.start..=close_range.end;
4524 if !bracket_range.overlaps(&chunk_range) {
4525 continue;
4526 }
4527
4528 open_to_close_ranges
4529 .entry((open_range.start, open_range.end))
4530 .or_insert_with(BTreeMap::new)
4531 .insert(
4532 (close_range.start, close_range.end),
4533 BracketMatch {
4534 open_range: open_range.clone(),
4535 close_range: close_range.clone(),
4536 syntax_layer_depth,
4537 newline_only: pattern.newline_only,
4538 color_index: None,
4539 },
4540 );
4541
4542 all_brackets.push((
4543 BracketMatch {
4544 open_range,
4545 close_range,
4546 syntax_layer_depth,
4547 newline_only: pattern.newline_only,
4548 color_index: None,
4549 },
4550 pattern.rainbow_exclude,
4551 ));
4552 }
4553
4554 let has_bogus_matches = open_to_close_ranges
4555 .iter()
4556 .any(|(_, end_ranges)| end_ranges.len() > 1);
4557 if has_bogus_matches {
4558 // Grammar is producing bogus matches where one open is paired with multiple
4559 // closes. Build a valid stack by walking through positions in order.
4560 // For each close, we know the expected open_len from tree-sitter matches.
4561
4562 // Map each close to its expected open length (for inferring opens)
4563 let close_to_open_len: HashMap<(usize, usize), usize> = all_brackets
4564 .iter()
4565 .map(|(m, _)| ((m.close_range.start, m.close_range.end), m.open_range.len()))
4566 .collect();
4567
4568 // Collect unique opens and closes within this chunk
4569 let mut unique_opens: HashSet<(usize, usize)> = all_brackets
4570 .iter()
4571 .map(|(m, _)| (m.open_range.start, m.open_range.end))
4572 .filter(|(start, _)| chunk_range.contains(start))
4573 .collect();
4574
4575 let mut unique_closes: Vec<(usize, usize)> = all_brackets
4576 .iter()
4577 .map(|(m, _)| (m.close_range.start, m.close_range.end))
4578 .filter(|(start, _)| chunk_range.contains(start))
4579 .collect();
4580 unique_closes.sort();
4581 unique_closes.dedup();
4582
4583 // Build valid pairs by walking through closes in order
4584 let mut unique_opens_vec: Vec<_> = unique_opens.iter().copied().collect();
4585 unique_opens_vec.sort();
4586
4587 let mut valid_pairs: HashSet<((usize, usize), (usize, usize))> = HashSet::default();
4588 let mut open_stack: Vec<(usize, usize)> = Vec::new();
4589 let mut open_idx = 0;
4590
4591 for close in &unique_closes {
4592 // Push all opens before this close onto stack
4593 while open_idx < unique_opens_vec.len()
4594 && unique_opens_vec[open_idx].0 < close.0
4595 {
4596 open_stack.push(unique_opens_vec[open_idx]);
4597 open_idx += 1;
4598 }
4599
4600 // Try to match with most recent open
4601 if let Some(open) = open_stack.pop() {
4602 valid_pairs.insert((open, *close));
4603 } else if let Some(&open_len) = close_to_open_len.get(close) {
4604 // No open on stack - infer one based on expected open_len
4605 if close.0 >= open_len {
4606 let inferred = (close.0 - open_len, close.0);
4607 unique_opens.insert(inferred);
4608 valid_pairs.insert((inferred, *close));
4609 all_brackets.push((
4610 BracketMatch {
4611 open_range: inferred.0..inferred.1,
4612 close_range: close.0..close.1,
4613 newline_only: false,
4614 syntax_layer_depth: 0,
4615 color_index: None,
4616 },
4617 false,
4618 ));
4619 }
4620 }
4621 }
4622
4623 all_brackets.retain(|(m, _)| {
4624 let open = (m.open_range.start, m.open_range.end);
4625 let close = (m.close_range.start, m.close_range.end);
4626 valid_pairs.contains(&(open, close))
4627 });
4628 }
4629
4630 let mut all_brackets = all_brackets
4631 .into_iter()
4632 .enumerate()
4633 .map(|(index, (bracket_match, rainbow_exclude))| {
4634 // Certain languages have "brackets" that are not really brackets, e.g. tags, and such
4635 // a bracket will match the entire tag with all of the text inside it.
4636 // For now, avoid coloring any pair where both brackets are longer than a single char.
4637 // We still need to colorize `<Element/>` bracket pairs, so this check cannot be made stricter.
4638 let should_color = !rainbow_exclude
4639 && (bracket_match.open_range.len() == 1
4640 || bracket_match.close_range.len() == 1);
4641 if should_color {
4642 opens.push(bracket_match.open_range.clone());
4643 color_pairs.push((
4644 bracket_match.open_range.clone(),
4645 bracket_match.close_range.clone(),
4646 index,
4647 ));
4648 }
4649 bracket_match
4650 })
4651 .collect::<Vec<_>>();
4652
4653 opens.sort_by_key(|r| (r.start, r.end));
4654 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4655 color_pairs.sort_by_key(|(_, close, _)| close.end);
4656
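// Walk pairs in order of their close position, pushing opens onto a stack; when a pair's
// open is on top of the stack, the stack depth (0-based) becomes that pair's color index.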
4657 let mut open_stack = Vec::new();
4658 let mut open_index = 0;
4659 for (open, close, index) in color_pairs {
4660 while open_index < opens.len() && opens[open_index].start < close.start {
4661 open_stack.push(opens[open_index].clone());
4662 open_index += 1;
4663 }
4664
4665 if open_stack.last() == Some(&open) {
4666 let depth_index = open_stack.len() - 1;
4667 all_brackets[index].color_index = Some(depth_index);
4668 open_stack.pop();
4669 }
4670 }
4671
4672 all_brackets.sort_by_key(|bracket_match| {
4673 (bracket_match.open_range.start, bracket_match.open_range.end)
4674 });
4675
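// Cache the computed brackets for this chunk, but only if no other computation
// has filled the slot in the meantime.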
4676 if let empty_slot @ None =
4677 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4678 {
4679 *empty_slot = Some(all_brackets.clone());
4680 }
4681 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4682 }
4683
4684 all_bracket_matches
4685 }
4686
4687 pub fn all_bracket_ranges(
4688 &self,
4689 range: Range<usize>,
4690 ) -> impl Iterator<Item = BracketMatch<usize>> {
4691 self.fetch_bracket_ranges(range.clone(), None)
4692 .into_values()
4693 .flatten()
4694 .filter(move |bracket_match| {
4695 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4696 bracket_range.overlaps(&range)
4697 })
4698 }
4699
4700 /// Returns bracket range pairs overlapping or adjacent to `range`
4701 pub fn bracket_ranges<T: ToOffset>(
4702 &self,
4703 range: Range<T>,
4704 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4705 // Find bracket pairs that *inclusively* contain the given range.
4706 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4707 self.all_bracket_ranges(range)
4708 .filter(|pair| !pair.newline_only)
4709 }
4710
4711 pub fn debug_variables_query<T: ToOffset>(
4712 &self,
4713 range: Range<T>,
4714 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4715 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4716
4717 let mut matches = self.syntax.matches_with_options(
4718 range.clone(),
4719 &self.text,
4720 TreeSitterOptions::default(),
4721 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4722 );
4723
4724 let configs = matches
4725 .grammars()
4726 .iter()
4727 .map(|grammar| grammar.debug_variables_config.as_ref())
4728 .collect::<Vec<_>>();
4729
4730 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4731
4732 iter::from_fn(move || {
4733 loop {
4734 while let Some(capture) = captures.pop() {
4735 if capture.0.overlaps(&range) {
4736 return Some(capture);
4737 }
4738 }
4739
4740 let mat = matches.peek()?;
4741
4742 let Some(config) = configs[mat.grammar_index].as_ref() else {
4743 matches.advance();
4744 continue;
4745 };
4746
4747 for capture in mat.captures {
4748 let Some(ix) = config
4749 .objects_by_capture_ix
4750 .binary_search_by_key(&capture.index, |e| e.0)
4751 .ok()
4752 else {
4753 continue;
4754 };
4755 let text_object = config.objects_by_capture_ix[ix].1;
4756 let byte_range = capture.node.byte_range();
4757
4758 let mut found = false;
4759 for (range, existing) in captures.iter_mut() {
4760 if existing == &text_object {
4761 range.start = range.start.min(byte_range.start);
4762 range.end = range.end.max(byte_range.end);
4763 found = true;
4764 break;
4765 }
4766 }
4767
4768 if !found {
4769 captures.push((byte_range, text_object));
4770 }
4771 }
4772
4773 matches.advance();
4774 }
4775 })
4776 }
4777
4778 pub fn text_object_ranges<T: ToOffset>(
4779 &self,
4780 range: Range<T>,
4781 options: TreeSitterOptions,
4782 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4783 let range =
4784 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4785
4786 let mut matches =
4787 self.syntax
4788 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4789 grammar.text_object_config.as_ref().map(|c| &c.query)
4790 });
4791
4792 let configs = matches
4793 .grammars()
4794 .iter()
4795 .map(|grammar| grammar.text_object_config.as_ref())
4796 .collect::<Vec<_>>();
4797
4798 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4799
4800 iter::from_fn(move || {
4801 loop {
4802 while let Some(capture) = captures.pop() {
4803 if capture.0.overlaps(&range) {
4804 return Some(capture);
4805 }
4806 }
4807
4808 let mat = matches.peek()?;
4809
4810 let Some(config) = configs[mat.grammar_index].as_ref() else {
4811 matches.advance();
4812 continue;
4813 };
4814
4815 for capture in mat.captures {
4816 let Some(ix) = config
4817 .text_objects_by_capture_ix
4818 .binary_search_by_key(&capture.index, |e| e.0)
4819 .ok()
4820 else {
4821 continue;
4822 };
4823 let text_object = config.text_objects_by_capture_ix[ix].1;
4824 let byte_range = capture.node.byte_range();
4825
4826 let mut found = false;
4827 for (range, existing) in captures.iter_mut() {
4828 if existing == &text_object {
4829 range.start = range.start.min(byte_range.start);
4830 range.end = range.end.max(byte_range.end);
4831 found = true;
4832 break;
4833 }
4834 }
4835
4836 if !found {
4837 captures.push((byte_range, text_object));
4838 }
4839 }
4840
4841 matches.advance();
4842 }
4843 })
4844 }
4845
4846 /// Returns enclosing bracket ranges containing the given range
4847 pub fn enclosing_bracket_ranges<T: ToOffset>(
4848 &self,
4849 range: Range<T>,
4850 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4851 let range = range.start.to_offset(self)..range.end.to_offset(self);
4852
4853 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4854 let max_depth = result
4855 .iter()
4856 .map(|mat| mat.syntax_layer_depth)
4857 .max()
4858 .unwrap_or(0);
4859 result.into_iter().filter(move |pair| {
4860 pair.open_range.start <= range.start
4861 && pair.close_range.end >= range.end
4862 && pair.syntax_layer_depth == max_depth
4863 })
4864 }
4865
4866 /// Returns the smallest enclosing bracket ranges containing the given range, or `None` if no brackets contain the range.
4867 ///
4868 /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
4869 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4870 &self,
4871 range: Range<T>,
4872 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4873 ) -> Option<(Range<usize>, Range<usize>)> {
4874 let range = range.start.to_offset(self)..range.end.to_offset(self);
4875
4876 // Get the ranges of the innermost pair of brackets.
4877 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4878
4879 for pair in self.enclosing_bracket_ranges(range) {
4880 if let Some(range_filter) = range_filter
4881 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4882 {
4883 continue;
4884 }
4885
4886 let len = pair.close_range.end - pair.open_range.start;
4887
4888 if let Some((existing_open, existing_close)) = &result {
4889 let existing_len = existing_close.end - existing_open.start;
4890 if len > existing_len {
4891 continue;
4892 }
4893 }
4894
4895 result = Some((pair.open_range, pair.close_range));
4896 }
4897
4898 result
4899 }
4900
4901 /// Returns anchor ranges for any matches of the redaction query.
4902 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4903 /// will be run on the relevant section of the buffer.
4904 pub fn redacted_ranges<T: ToOffset>(
4905 &self,
4906 range: Range<T>,
4907 ) -> impl Iterator<Item = Range<usize>> + '_ {
4908 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4909 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4910 grammar
4911 .redactions_config
4912 .as_ref()
4913 .map(|config| &config.query)
4914 });
4915
4916 let configs = syntax_matches
4917 .grammars()
4918 .iter()
4919 .map(|grammar| grammar.redactions_config.as_ref())
4920 .collect::<Vec<_>>();
4921
4922 iter::from_fn(move || {
4923 let redacted_range = syntax_matches
4924 .peek()
4925 .and_then(|mat| {
4926 configs[mat.grammar_index].and_then(|config| {
4927 mat.captures
4928 .iter()
4929 .find(|capture| capture.index == config.redaction_capture_ix)
4930 })
4931 })
4932 .map(|mat| mat.node.byte_range());
4933 syntax_matches.advance();
4934 redacted_range
4935 })
4936 }
4937
4938 pub fn injections_intersecting_range<T: ToOffset>(
4939 &self,
4940 range: Range<T>,
4941 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4942 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4943
4944 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4945 grammar
4946 .injection_config
4947 .as_ref()
4948 .map(|config| &config.query)
4949 });
4950
4951 let configs = syntax_matches
4952 .grammars()
4953 .iter()
4954 .map(|grammar| grammar.injection_config.as_ref())
4955 .collect::<Vec<_>>();
4956
4957 iter::from_fn(move || {
4958 let ranges = syntax_matches.peek().and_then(|mat| {
4959 let config = &configs[mat.grammar_index]?;
4960 let content_capture_range = mat.captures.iter().find_map(|capture| {
4961 if capture.index == config.content_capture_ix {
4962 Some(capture.node.byte_range())
4963 } else {
4964 None
4965 }
4966 })?;
4967 let language = self.language_at(content_capture_range.start)?;
4968 Some((content_capture_range, language))
4969 });
4970 syntax_matches.advance();
4971 ranges
4972 })
4973 }
4974
4975 pub fn runnable_ranges(
4976 &self,
4977 offset_range: Range<usize>,
4978 ) -> impl Iterator<Item = RunnableRange> + '_ {
4979 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4980 grammar.runnable_config.as_ref().map(|config| &config.query)
4981 });
4982
4983 let test_configs = syntax_matches
4984 .grammars()
4985 .iter()
4986 .map(|grammar| grammar.runnable_config.as_ref())
4987 .collect::<Vec<_>>();
4988
4989 iter::from_fn(move || {
4990 loop {
4991 let mat = syntax_matches.peek()?;
4992
4993 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4994 let mut run_range = None;
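// `full_range` is the smallest byte range spanning every capture of this match;
// `run_range` is set from the capture tagged as the run marker, if any.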
4995 let full_range = mat.captures.iter().fold(
4996 Range {
4997 start: usize::MAX,
4998 end: 0,
4999 },
5000 |mut acc, next| {
5001 let byte_range = next.node.byte_range();
5002 if acc.start > byte_range.start {
5003 acc.start = byte_range.start;
5004 }
5005 if acc.end < byte_range.end {
5006 acc.end = byte_range.end;
5007 }
5008 acc
5009 },
5010 );
5011 if full_range.start > full_range.end {
5012 // We did not find a full spanning range of this match.
5013 return None;
5014 }
5015 let extra_captures: SmallVec<[_; 1]> =
5016 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
5017 test_configs
5018 .extra_captures
5019 .get(capture.index as usize)
5020 .cloned()
5021 .and_then(|tag_name| match tag_name {
5022 RunnableCapture::Named(name) => {
5023 Some((capture.node.byte_range(), name))
5024 }
5025 RunnableCapture::Run => {
5026 let _ = run_range.insert(capture.node.byte_range());
5027 None
5028 }
5029 })
5030 }));
5031 let run_range = run_range?;
5032 let tags = test_configs
5033 .query
5034 .property_settings(mat.pattern_index)
5035 .iter()
5036 .filter_map(|property| {
5037 if *property.key == *"tag" {
5038 property
5039 .value
5040 .as_ref()
5041 .map(|value| RunnableTag(value.to_string().into()))
5042 } else {
5043 None
5044 }
5045 })
5046 .collect();
5047 let extra_captures = extra_captures
5048 .into_iter()
5049 .map(|(range, name)| {
5050 (
5051 name.to_string(),
5052 self.text_for_range(range).collect::<String>(),
5053 )
5054 })
5055 .collect();
5056 // All tags should have the same range.
5057 Some(RunnableRange {
5058 run_range,
5059 full_range,
5060 runnable: Runnable {
5061 tags,
5062 language: mat.language,
5063 buffer: self.remote_id(),
5064 },
5065 extra_captures,
5066 buffer_id: self.remote_id(),
5067 })
5068 });
5069
5070 syntax_matches.advance();
5071 if test_range.is_some() {
5072 // It's fine for us to short-circuit on .peek()? returning None. We don't want to return None from this iter if we
5073 // had a capture that did not contain a run marker, hence we'll just loop around for the next capture.
5074 return test_range;
5075 }
5076 }
5077 })
5078 }
5079
5080 /// Returns selections for remote peers intersecting the given range.
5081 #[allow(clippy::type_complexity)]
5082 pub fn selections_in_range(
5083 &self,
5084 range: Range<Anchor>,
5085 include_local: bool,
5086 ) -> impl Iterator<
5087 Item = (
5088 ReplicaId,
5089 bool,
5090 CursorShape,
5091 impl Iterator<Item = &Selection<Anchor>> + '_,
5092 ),
5093 > + '_ {
5094 self.remote_selections
5095 .iter()
5096 .filter(move |(replica_id, set)| {
5097 (include_local || **replica_id != self.text.replica_id())
5098 && !set.selections.is_empty()
5099 })
5100 .map(move |(replica_id, set)| {
5101 let start_ix = match set.selections.binary_search_by(|probe| {
5102 probe.end.cmp(&range.start, self).then(Ordering::Greater)
5103 }) {
5104 Ok(ix) | Err(ix) => ix,
5105 };
5106 let end_ix = match set.selections.binary_search_by(|probe| {
5107 probe.start.cmp(&range.end, self).then(Ordering::Less)
5108 }) {
5109 Ok(ix) | Err(ix) => ix,
5110 };
5111
5112 (
5113 *replica_id,
5114 set.line_mode,
5115 set.cursor_shape,
5116 set.selections[start_ix..end_ix].iter(),
5117 )
5118 })
5119 }
5120
5121 /// Returns whether the buffer contains any diagnostics.
5122 pub fn has_diagnostics(&self) -> bool {
5123 !self.diagnostics.is_empty()
5124 }
5125
5126 /// Returns all the diagnostics intersecting the given range.
5127 pub fn diagnostics_in_range<'a, T, O>(
5128 &'a self,
5129 search_range: Range<T>,
5130 reversed: bool,
5131 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
5132 where
5133 T: 'a + Clone + ToOffset,
5134 O: 'a + FromAnchor,
5135 {
5136 let mut iterators: Vec<_> = self
5137 .diagnostics
5138 .iter()
5139 .map(|(_, collection)| {
5140 collection
5141 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
5142 .peekable()
5143 })
5144 .collect();
5145
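// Merge the per-server iterators, always yielding whichever entry sorts next
// (by range start, then severity, then group id), reversed when requested.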
5146 std::iter::from_fn(move || {
5147 let (next_ix, _) = iterators
5148 .iter_mut()
5149 .enumerate()
5150 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
5151 .min_by(|(_, a), (_, b)| {
5152 let cmp = a
5153 .range
5154 .start
5155 .cmp(&b.range.start, self)
5156 // when range is equal, sort by diagnostic severity
5157 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
5158 // and stabilize order with group_id
5159 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
5160 if reversed { cmp.reverse() } else { cmp }
5161 })?;
5162 iterators[next_ix]
5163 .next()
5164 .map(
5165 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
5166 diagnostic,
5167 range: FromAnchor::from_anchor(&range.start, self)
5168 ..FromAnchor::from_anchor(&range.end, self),
5169 },
5170 )
5171 })
5172 }
5173
5174 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
5175 /// should be used instead.
5176 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
5177 &self.diagnostics
5178 }
5179
5180 /// Returns all the diagnostic groups associated with the given
5181 /// language server ID. If no language server ID is provided,
5182 /// all diagnostic groups are returned.
5183 pub fn diagnostic_groups(
5184 &self,
5185 language_server_id: Option<LanguageServerId>,
5186 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5187 let mut groups = Vec::new();
5188
5189 if let Some(language_server_id) = language_server_id {
5190 if let Ok(ix) = self
5191 .diagnostics
5192 .binary_search_by_key(&language_server_id, |e| e.0)
5193 {
5194 self.diagnostics[ix]
5195 .1
5196 .groups(language_server_id, &mut groups, self);
5197 }
5198 } else {
5199 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5200 diagnostics.groups(*language_server_id, &mut groups, self);
5201 }
5202 }
5203
5204 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5205 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5206 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5207 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5208 });
5209
5210 groups
5211 }
5212
5213 /// Returns an iterator over the diagnostics for the given group.
5214 pub fn diagnostic_group<O>(
5215 &self,
5216 group_id: usize,
5217 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5218 where
5219 O: FromAnchor + 'static,
5220 {
5221 self.diagnostics
5222 .iter()
5223 .flat_map(move |(_, set)| set.group(group_id, self))
5224 }
5225
5226 /// An integer version number that accounts for all updates besides
5227 /// the buffer's text itself (which is versioned via a version vector).
5228 pub fn non_text_state_update_count(&self) -> usize {
5229 self.non_text_state_update_count
5230 }
5231
5232 /// An integer version that changes when the buffer's syntax changes.
5233 pub fn syntax_update_count(&self) -> usize {
5234 self.syntax.update_count()
5235 }
5236
5237 /// Returns a snapshot of underlying file.
5238 pub fn file(&self) -> Option<&Arc<dyn File>> {
5239 self.file.as_ref()
5240 }
5241
5242 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5243 if let Some(file) = self.file() {
5244 if file.path().file_name().is_none() || include_root {
5245 Some(file.full_path(cx).to_string_lossy().into_owned())
5246 } else {
5247 Some(file.path().display(file.path_style(cx)).to_string())
5248 }
5249 } else {
5250 None
5251 }
5252 }
5253
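/// Collects the words in `query.range`, mapping each word's text to its anchor range
/// in the buffer. See [`WordsQuery`] for the available filters.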
5254 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5255 let query_str = query.fuzzy_contents;
5256 if query_str.is_some_and(|query| query.is_empty()) {
5257 return BTreeMap::default();
5258 }
5259
5260 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5261 language,
5262 override_id: None,
5263 }));
5264
5265 let mut query_ix = 0;
5266 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5267 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5268
5269 let mut words = BTreeMap::default();
5270 let mut current_word_start_ix = None;
5271 let mut chunk_ix = query.range.start;
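// Walk the text tracking word boundaries; `query_ix` advances through the fuzzy query as
// its characters are matched, in order, within the current word, and a word is recorded
// only once the entire query has been consumed.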
5272 for chunk in self.chunks(query.range, false) {
5273 for (i, c) in chunk.text.char_indices() {
5274 let ix = chunk_ix + i;
5275 if classifier.is_word(c) {
5276 if current_word_start_ix.is_none() {
5277 current_word_start_ix = Some(ix);
5278 }
5279
5280 if let Some(query_chars) = &query_chars
5281 && query_ix < query_len
5282 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5283 {
5284 query_ix += 1;
5285 }
5286 continue;
5287 } else if let Some(word_start) = current_word_start_ix.take()
5288 && query_ix == query_len
5289 {
5290 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5291 let mut word_text = self.text_for_range(word_start..ix).peekable();
5292 let first_char = word_text
5293 .peek()
5294 .and_then(|first_chunk| first_chunk.chars().next());
5295 // As a heuristic to reduce useless completions, optionally skip "words" that start with a digit.
5296 if !query.skip_digits
5297 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5298 {
5299 words.insert(word_text.collect(), word_range);
5300 }
5301 }
5302 query_ix = 0;
5303 }
5304 chunk_ix += chunk.text.len();
5305 }
5306
5307 words
5308 }
5309}
5310
5311pub struct WordsQuery<'a> {
5312 /// Only returns words that contain all characters of the fuzzy string, in order.
5313 pub fuzzy_contents: Option<&'a str>,
5314 /// Skips words that start with a digit.
5315 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5317 pub range: Range<usize>,
5318}
5319
/// Returns the indentation at the start of the given row.
fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5321 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5322}
5323
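/// Computes the indentation at the start of the given text: the length is the number of
/// leading spaces and tabs, and the kind is taken from the first indentation character.
///
/// An illustrative sketch (not a doctest, since this helper is private):
///
/// ```ignore
/// let indent = indent_size_for_text("    fn main() {}".chars());
/// assert_eq!((indent.kind, indent.len), (IndentKind::Space, 4));
///
/// let indent = indent_size_for_text("\tfoo".chars());
/// assert_eq!((indent.kind, indent.len), (IndentKind::Tab, 1));
/// ```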
5324fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5325 let mut result = IndentSize::spaces(0);
5326 for c in text {
5327 let kind = match c {
5328 ' ' => IndentKind::Space,
5329 '\t' => IndentKind::Tab,
5330 _ => break,
5331 };
5332 if result.len == 0 {
5333 result.kind = kind;
5334 }
5335 result.len += 1;
5336 }
5337 result
5338}
5339
5340impl Clone for BufferSnapshot {
5341 fn clone(&self) -> Self {
5342 Self {
5343 text: self.text.clone(),
5344 syntax: self.syntax.clone(),
5345 file: self.file.clone(),
5346 remote_selections: self.remote_selections.clone(),
5347 diagnostics: self.diagnostics.clone(),
5348 language: self.language.clone(),
5349 tree_sitter_data: self.tree_sitter_data.clone(),
5350 non_text_state_update_count: self.non_text_state_update_count,
5351 capability: self.capability,
5352 modeline: self.modeline.clone(),
5353 }
5354 }
5355}
5356
5357impl Deref for BufferSnapshot {
5358 type Target = text::BufferSnapshot;
5359
5360 fn deref(&self) -> &Self::Target {
5361 &self.text
5362 }
5363}
5364
5365unsafe impl Send for BufferChunks<'_> {}
5366
5367impl<'a> BufferChunks<'a> {
    /// Creates a chunk iterator over `range` within `text`, optionally decorated with
    /// syntax highlights and diagnostic information.
    pub(crate) fn new(
5369 text: &'a Rope,
5370 range: Range<usize>,
5371 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5372 diagnostics: bool,
5373 buffer_snapshot: Option<&'a BufferSnapshot>,
5374 ) -> Self {
5375 let mut highlights = None;
5376 if let Some((captures, highlight_maps)) = syntax {
5377 highlights = Some(BufferChunkHighlights {
5378 captures,
5379 next_capture: None,
5380 stack: Default::default(),
5381 highlight_maps,
5382 })
5383 }
5384
5385 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5386 let chunks = text.chunks_in_range(range.clone());
5387
5388 let mut this = BufferChunks {
5389 range,
5390 buffer_snapshot,
5391 chunks,
5392 diagnostic_endpoints,
5393 error_depth: 0,
5394 warning_depth: 0,
5395 information_depth: 0,
5396 hint_depth: 0,
5397 unnecessary_depth: 0,
5398 underline: true,
5399 highlights,
5400 };
5401 this.initialize_diagnostic_endpoints();
5402 this
5403 }
5404
    /// Seeks to the given byte range in the buffer.
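    ///
    /// An illustrative sketch (not a doctest; assumes a `BufferSnapshot` named `snapshot`):
    ///
    /// ```ignore
    /// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
    /// chunks.seek(4..10);
    /// let text: String = chunks.map(|chunk| chunk.text).collect();
    /// assert_eq!(text, snapshot.text_for_range(4..10).collect::<String>());
    /// ```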
5406 pub fn seek(&mut self, range: Range<usize>) {
5407 let old_range = std::mem::replace(&mut self.range, range.clone());
5408 self.chunks.set_range(self.range.clone());
5409 if let Some(highlights) = self.highlights.as_mut() {
5410 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5411 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5412 highlights
5413 .stack
5414 .retain(|(end_offset, _)| *end_offset > range.start);
5415 if let Some(capture) = &highlights.next_capture
5416 && range.start >= capture.node.start_byte()
5417 {
5418 let next_capture_end = capture.node.end_byte();
5419 if range.start < next_capture_end {
5420 highlights.stack.push((
5421 next_capture_end,
5422 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5423 ));
5424 }
5425 highlights.next_capture.take();
5426 }
5427 } else if let Some(snapshot) = self.buffer_snapshot {
5428 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5429 *highlights = BufferChunkHighlights {
5430 captures,
5431 next_capture: None,
5432 stack: Default::default(),
5433 highlight_maps,
5434 };
5435 } else {
5436 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5437 // Seeking such BufferChunks is not supported.
5438 debug_assert!(
5439 false,
5440 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5441 );
5442 }
5443
5444 highlights.captures.set_byte_range(self.range.clone());
5445 self.initialize_diagnostic_endpoints();
5446 }
5447 }
5448
5449 fn initialize_diagnostic_endpoints(&mut self) {
5450 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5451 && let Some(buffer) = self.buffer_snapshot
5452 {
5453 let mut diagnostic_endpoints = Vec::new();
5454 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5455 diagnostic_endpoints.push(DiagnosticEndpoint {
5456 offset: entry.range.start,
5457 is_start: true,
5458 severity: entry.diagnostic.severity,
5459 is_unnecessary: entry.diagnostic.is_unnecessary,
5460 underline: entry.diagnostic.underline,
5461 });
5462 diagnostic_endpoints.push(DiagnosticEndpoint {
5463 offset: entry.range.end,
5464 is_start: false,
5465 severity: entry.diagnostic.severity,
5466 is_unnecessary: entry.diagnostic.is_unnecessary,
5467 underline: entry.diagnostic.underline,
5468 });
5469 }
5470 diagnostic_endpoints
5471 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5472 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5473 self.hint_depth = 0;
5474 self.error_depth = 0;
5475 self.warning_depth = 0;
5476 self.information_depth = 0;
5477 }
5478 }
5479
5480 /// The current byte offset in the buffer.
5481 pub fn offset(&self) -> usize {
5482 self.range.start
5483 }
5484
    /// The byte range that remains to be iterated.
    pub fn range(&self) -> Range<usize> {
5486 self.range.clone()
5487 }
5488
5489 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5490 let depth = match endpoint.severity {
5491 DiagnosticSeverity::ERROR => &mut self.error_depth,
5492 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5493 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5494 DiagnosticSeverity::HINT => &mut self.hint_depth,
5495 _ => return,
5496 };
5497 if endpoint.is_start {
5498 *depth += 1;
5499 } else {
5500 *depth -= 1;
5501 }
5502
5503 if endpoint.is_unnecessary {
5504 if endpoint.is_start {
5505 self.unnecessary_depth += 1;
5506 } else {
5507 self.unnecessary_depth -= 1;
5508 }
5509 }
5510 }
5511
5512 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5513 if self.error_depth > 0 {
5514 Some(DiagnosticSeverity::ERROR)
5515 } else if self.warning_depth > 0 {
5516 Some(DiagnosticSeverity::WARNING)
5517 } else if self.information_depth > 0 {
5518 Some(DiagnosticSeverity::INFORMATION)
5519 } else if self.hint_depth > 0 {
5520 Some(DiagnosticSeverity::HINT)
5521 } else {
5522 None
5523 }
5524 }
5525
5526 fn current_code_is_unnecessary(&self) -> bool {
5527 self.unnecessary_depth > 0
5528 }
5529}
5530
5531impl<'a> Iterator for BufferChunks<'a> {
5532 type Item = Chunk<'a>;
5533
5534 fn next(&mut self) -> Option<Self::Item> {
5535 let mut next_capture_start = usize::MAX;
5536 let mut next_diagnostic_endpoint = usize::MAX;
5537
5538 if let Some(highlights) = self.highlights.as_mut() {
5539 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5540 if *parent_capture_end <= self.range.start {
5541 highlights.stack.pop();
5542 } else {
5543 break;
5544 }
5545 }
5546
5547 if highlights.next_capture.is_none() {
5548 highlights.next_capture = highlights.captures.next();
5549 }
5550
5551 while let Some(capture) = highlights.next_capture.as_ref() {
5552 if self.range.start < capture.node.start_byte() {
5553 next_capture_start = capture.node.start_byte();
5554 break;
5555 } else {
5556 let highlight_id =
5557 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5558 highlights
5559 .stack
5560 .push((capture.node.end_byte(), highlight_id));
5561 highlights.next_capture = highlights.captures.next();
5562 }
5563 }
5564 }
5565
5566 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5567 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5568 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5569 if endpoint.offset <= self.range.start {
5570 self.update_diagnostic_depths(endpoint);
5571 diagnostic_endpoints.next();
5572 self.underline = endpoint.underline;
5573 } else {
5574 next_diagnostic_endpoint = endpoint.offset;
5575 break;
5576 }
5577 }
5578 }
5579 self.diagnostic_endpoints = diagnostic_endpoints;
5580
5581 if let Some(ChunkBitmaps {
5582 text: chunk,
5583 chars: chars_map,
5584 tabs,
5585 }) = self.chunks.peek_with_bitmaps()
5586 {
5587 let chunk_start = self.range.start;
5588 let mut chunk_end = (self.chunks.offset() + chunk.len())
5589 .min(next_capture_start)
5590 .min(next_diagnostic_endpoint);
5591 let mut highlight_id = None;
5592 if let Some(highlights) = self.highlights.as_ref()
5593 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5594 {
5595 chunk_end = chunk_end.min(*parent_capture_end);
5596 highlight_id = Some(*parent_highlight_id);
5597 }
5598 let bit_start = chunk_start - self.chunks.offset();
5599 let bit_end = chunk_end - self.chunks.offset();
5600
5601 let slice = &chunk[bit_start..bit_end];
5602
5603 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5604 let tabs = (tabs >> bit_start) & mask;
5605 let chars = (chars_map >> bit_start) & mask;
5606
5607 self.range.start = chunk_end;
5608 if self.range.start == self.chunks.offset() + chunk.len() {
5609 self.chunks.next().unwrap();
5610 }
5611
5612 Some(Chunk {
5613 text: slice,
5614 syntax_highlight_id: highlight_id,
5615 underline: self.underline,
5616 diagnostic_severity: self.current_diagnostic_severity(),
5617 is_unnecessary: self.current_code_is_unnecessary(),
5618 tabs,
5619 chars,
5620 ..Chunk::default()
5621 })
5622 } else {
5623 None
5624 }
5625 }
5626}
5627
5628impl operation_queue::Operation for Operation {
5629 fn lamport_timestamp(&self) -> clock::Lamport {
5630 match self {
5631 Operation::Buffer(_) => {
5632 unreachable!("buffer operations should never be deferred at this layer")
5633 }
5634 Operation::UpdateDiagnostics {
5635 lamport_timestamp, ..
5636 }
5637 | Operation::UpdateSelections {
5638 lamport_timestamp, ..
5639 }
5640 | Operation::UpdateCompletionTriggers {
5641 lamport_timestamp, ..
5642 }
5643 | Operation::UpdateLineEnding {
5644 lamport_timestamp, ..
5645 } => *lamport_timestamp,
5646 }
5647 }
5648}
5649
5650impl Default for Diagnostic {
5651 fn default() -> Self {
5652 Self {
5653 source: Default::default(),
5654 source_kind: DiagnosticSourceKind::Other,
5655 code: None,
5656 code_description: None,
5657 severity: DiagnosticSeverity::ERROR,
5658 message: Default::default(),
5659 markdown: None,
5660 group_id: 0,
5661 is_primary: false,
5662 is_disk_based: false,
5663 is_unnecessary: false,
5664 underline: true,
5665 data: None,
5666 registration_id: None,
5667 }
5668 }
5669}
5670
5671impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5673 pub fn spaces(len: u32) -> Self {
5674 Self {
5675 len,
5676 kind: IndentKind::Space,
5677 }
5678 }
5679
5680 /// Returns an [`IndentSize`] representing a tab.
5681 pub fn tab() -> Self {
5682 Self {
5683 len: 1,
5684 kind: IndentKind::Tab,
5685 }
5686 }
5687
5688 /// An iterator over the characters represented by this [`IndentSize`].
5689 pub fn chars(&self) -> impl Iterator<Item = char> {
5690 iter::repeat(self.char()).take(self.len as usize)
5691 }
5692
5693 /// The character representation of this [`IndentSize`].
5694 pub fn char(&self) -> char {
5695 match self.kind {
5696 IndentKind::Space => ' ',
5697 IndentKind::Tab => '\t',
5698 }
5699 }
5700
    /// Consumes the current [`IndentSize`] and returns a new one that has
    /// been shrunk or enlarged by the given size in the given direction.
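    ///
    /// An illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    ///
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Greater, IndentSize::spaces(4));
    /// assert_eq!(indent.len, 8);
    ///
    /// let indent = IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::spaces(2));
    /// assert_eq!(indent.len, 2);
    /// ```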
5703 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5704 match direction {
5705 Ordering::Less => {
5706 if self.kind == size.kind && self.len >= size.len {
5707 self.len -= size.len;
5708 }
5709 }
5710 Ordering::Equal => {}
5711 Ordering::Greater => {
5712 if self.len == 0 {
5713 self = size;
5714 } else if self.kind == size.kind {
5715 self.len += size.len;
5716 }
5717 }
5718 }
5719 self
5720 }
5721
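    /// Returns the indent's width in columns, expanding each tab to the given tab size.
    ///
    /// An illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// use std::num::NonZeroU32;
    ///
    /// let tab_size = NonZeroU32::new(4).unwrap();
    /// assert_eq!(IndentSize::tab().len_with_expanded_tabs(tab_size), 4);
    /// assert_eq!(IndentSize::spaces(3).len_with_expanded_tabs(tab_size), 3);
    /// ```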
5722 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5723 match self.kind {
5724 IndentKind::Space => self.len as usize,
5725 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5726 }
5727 }
5728}
5729
5730#[cfg(any(test, feature = "test-support"))]
5731pub struct TestFile {
5732 pub path: Arc<RelPath>,
5733 pub root_name: String,
5734 pub local_root: Option<PathBuf>,
5735}
5736
5737#[cfg(any(test, feature = "test-support"))]
5738impl File for TestFile {
5739 fn path(&self) -> &Arc<RelPath> {
5740 &self.path
5741 }
5742
5743 fn full_path(&self, _: &gpui::App) -> PathBuf {
5744 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5745 }
5746
5747 fn as_local(&self) -> Option<&dyn LocalFile> {
5748 if self.local_root.is_some() {
5749 Some(self)
5750 } else {
5751 None
5752 }
5753 }
5754
5755 fn disk_state(&self) -> DiskState {
5756 unimplemented!()
5757 }
5758
5759 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5760 self.path().file_name().unwrap_or(self.root_name.as_ref())
5761 }
5762
5763 fn worktree_id(&self, _: &App) -> WorktreeId {
5764 WorktreeId::from_usize(0)
5765 }
5766
5767 fn to_proto(&self, _: &App) -> rpc::proto::File {
5768 unimplemented!()
5769 }
5770
5771 fn is_private(&self) -> bool {
5772 false
5773 }
5774
5775 fn path_style(&self, _cx: &App) -> PathStyle {
5776 PathStyle::local()
5777 }
5778}
5779
5780#[cfg(any(test, feature = "test-support"))]
5781impl LocalFile for TestFile {
5782 fn abs_path(&self, _cx: &App) -> PathBuf {
5783 PathBuf::from(self.local_root.as_ref().unwrap())
5784 .join(&self.root_name)
5785 .join(self.path.as_std_path())
5786 }
5787
5788 fn load(&self, _cx: &App) -> Task<Result<String>> {
5789 unimplemented!()
5790 }
5791
5792 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5793 unimplemented!()
5794 }
5795}
5796
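/// Groups an ascending sequence of values into contiguous ranges, with each range capped
/// at `max_len` values.
///
/// An illustrative sketch (not a doctest, since this helper is crate-private):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 9].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 9..10]);
/// ```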
5797pub(crate) fn contiguous_ranges(
5798 values: impl Iterator<Item = u32>,
5799 max_len: usize,
5800) -> impl Iterator<Item = Range<u32>> {
5801 let mut values = values;
5802 let mut current_range: Option<Range<u32>> = None;
5803 std::iter::from_fn(move || {
5804 loop {
5805 if let Some(value) = values.next() {
5806 if let Some(range) = &mut current_range
5807 && value == range.end
5808 && range.len() < max_len
5809 {
5810 range.end += 1;
5811 continue;
5812 }
5813
5814 let prev_range = current_range.clone();
5815 current_range = Some(value..(value + 1));
5816 if prev_range.is_some() {
5817 return prev_range;
5818 }
5819 } else {
5820 return current_range.take();
5821 }
5822 }
5823 })
5824}
5825
/// Classifies characters as word, whitespace, or punctuation characters, optionally
/// taking language-specific word characters and scope context into account.
#[derive(Default, Debug)]
pub struct CharClassifier {
5828 scope: Option<LanguageScope>,
5829 scope_context: Option<CharScopeContext>,
5830 ignore_punctuation: bool,
5831}
5832
5833impl CharClassifier {
5834 pub fn new(scope: Option<LanguageScope>) -> Self {
5835 Self {
5836 scope,
5837 scope_context: None,
5838 ignore_punctuation: false,
5839 }
5840 }
5841
5842 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5843 Self {
5844 scope_context,
5845 ..self
5846 }
5847 }
5848
5849 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5850 Self {
5851 ignore_punctuation,
5852 ..self
5853 }
5854 }
5855
5856 pub fn is_whitespace(&self, c: char) -> bool {
5857 self.kind(c) == CharKind::Whitespace
5858 }
5859
5860 pub fn is_word(&self, c: char) -> bool {
5861 self.kind(c) == CharKind::Word
5862 }
5863
5864 pub fn is_punctuation(&self, c: char) -> bool {
5865 self.kind(c) == CharKind::Punctuation
5866 }
5867
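    /// Classifies a single character, optionally treating punctuation as part of a word.
    ///
    /// An illustrative sketch (not a doctest):
    ///
    /// ```ignore
    /// let classifier = CharClassifier::new(None);
    /// assert_eq!(classifier.kind_with('a', false), CharKind::Word);
    /// assert_eq!(classifier.kind_with('_', false), CharKind::Word);
    /// assert_eq!(classifier.kind_with(' ', false), CharKind::Whitespace);
    /// assert_eq!(classifier.kind_with('.', false), CharKind::Punctuation);
    /// assert_eq!(classifier.kind_with('.', true), CharKind::Word);
    /// ```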
5868 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5869 if c.is_alphanumeric() || c == '_' {
5870 return CharKind::Word;
5871 }
5872
5873 if let Some(scope) = &self.scope {
5874 let characters = match self.scope_context {
5875 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5876 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5877 None => scope.word_characters(),
5878 };
5879 if let Some(characters) = characters
5880 && characters.contains(&c)
5881 {
5882 return CharKind::Word;
5883 }
5884 }
5885
5886 if c.is_whitespace() {
5887 return CharKind::Whitespace;
5888 }
5889
5890 if ignore_punctuation {
5891 CharKind::Word
5892 } else {
5893 CharKind::Punctuation
5894 }
5895 }
5896
5897 pub fn kind(&self, c: char) -> CharKind {
5898 self.kind_with(c, self.ignore_punctuation)
5899 }
5900}
5901
5902/// Find all of the ranges of whitespace that occur at the ends of lines
5903/// in the given rope.
5904///
5905/// This could also be done with a regex search, but this implementation
5906/// avoids copying text.
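///
/// An illustrative sketch (not a doctest):
///
/// ```ignore
/// let rope = Rope::from("foo  \nbar\t\nbaz");
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![3..5, 9..10]);
/// ```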
5907pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5908 let mut ranges = Vec::new();
5909
5910 let mut offset = 0;
5911 let mut prev_chunk_trailing_whitespace_range = 0..0;
5912 for chunk in rope.chunks() {
5913 let mut prev_line_trailing_whitespace_range = 0..0;
5914 for (i, line) in chunk.split('\n').enumerate() {
5915 let line_end_offset = offset + line.len();
5916 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5917 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5918
5919 if i == 0 && trimmed_line_len == 0 {
5920 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5921 }
5922 if !prev_line_trailing_whitespace_range.is_empty() {
5923 ranges.push(prev_line_trailing_whitespace_range);
5924 }
5925
5926 offset = line_end_offset + 1;
5927 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5928 }
5929
5930 offset -= 1;
5931 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5932 }
5933
5934 if !prev_chunk_trailing_whitespace_range.is_empty() {
5935 ranges.push(prev_chunk_trailing_whitespace_range);
5936 }
5937
5938 ranges
5939}