1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff_with_offsets,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::Arc,
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
/// Indicates whether a [`Buffer`] has permission to be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
    /// The buffer is a mutable replica, but has been toggled to be read-only.
85 Read,
86 /// The buffer is a read-only replica.
87 ReadOnly,
88}
89
90impl Capability {
91 /// Returns `true` if the capability is `ReadWrite`.
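    ///
    /// A minimal illustration of the mapping (not run as a doctest):
    /// ```ignore
    /// assert!(Capability::ReadWrite.editable());
    /// assert!(!Capability::Read.editable());
    /// assert!(!Capability::ReadOnly.editable());
    /// ```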
92 pub fn editable(self) -> bool {
93 matches!(self, Capability::ReadWrite)
94 }
95}
96
97pub type BufferRow = u32;
98
99/// An in-memory representation of a source code file, including its text,
100/// syntax trees, git status, and diagnostics.
101pub struct Buffer {
102 text: TextBuffer,
103 branch_state: Option<BufferBranchState>,
104 /// Filesystem state, `None` when there is no path.
105 file: Option<Arc<dyn File>>,
106 /// The mtime of the file when this buffer was last loaded from
107 /// or saved to disk.
108 saved_mtime: Option<MTime>,
109 /// The version vector when this buffer was last loaded from
110 /// or saved to disk.
111 saved_version: clock::Global,
112 preview_version: clock::Global,
113 transaction_depth: usize,
114 was_dirty_before_starting_transaction: Option<bool>,
115 reload_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 autoindent_requests: Vec<Arc<AutoindentRequest>>,
118 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
119 pending_autoindent: Option<Task<()>>,
120 sync_parse_timeout: Option<Duration>,
121 syntax_map: Mutex<SyntaxMap>,
122 reparse: Option<Task<()>>,
123 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
124 non_text_state_update_count: usize,
125 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
126 remote_selections: TreeMap<ReplicaId, SelectionSet>,
127 diagnostics_timestamp: clock::Lamport,
128 completion_triggers: BTreeSet<String>,
129 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
130 completion_triggers_timestamp: clock::Lamport,
131 deferred_ops: OperationQueue<Operation>,
132 capability: Capability,
133 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
136 has_unsaved_edits: Cell<(clock::Global, bool)>,
137 change_bits: Vec<rc::Weak<Cell<bool>>>,
138 _subscriptions: Vec<gpui::Subscription>,
139 tree_sitter_data: Arc<TreeSitterData>,
140 encoding: &'static Encoding,
141 has_bom: bool,
142}
143
144#[derive(Debug)]
145pub struct TreeSitterData {
146 chunks: RowChunks,
147 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
148}
149
150const MAX_ROWS_IN_A_CHUNK: u32 = 50;
151
152impl TreeSitterData {
153 fn clear(&mut self, snapshot: text::BufferSnapshot) {
154 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
155 self.brackets_by_chunks.get_mut().clear();
156 self.brackets_by_chunks
157 .get_mut()
158 .resize(self.chunks.len(), None);
159 }
160
161 fn new(snapshot: text::BufferSnapshot) -> Self {
162 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
163 Self {
164 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
165 chunks,
166 }
167 }
168
169 fn version(&self) -> &clock::Global {
170 self.chunks.version()
171 }
172}
173
174#[derive(Copy, Clone, Debug, PartialEq, Eq)]
175pub enum ParseStatus {
176 Idle,
177 Parsing,
178}
179
180struct BufferBranchState {
181 base_buffer: Entity<Buffer>,
182 merged_operations: Vec<Lamport>,
183}
184
185/// An immutable, cheaply cloneable representation of a fixed
186/// state of a buffer.
187pub struct BufferSnapshot {
188 pub text: text::BufferSnapshot,
189 pub syntax: SyntaxSnapshot,
190 file: Option<Arc<dyn File>>,
191 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
192 remote_selections: TreeMap<ReplicaId, SelectionSet>,
193 language: Option<Arc<Language>>,
194 non_text_state_update_count: usize,
195 tree_sitter_data: Arc<TreeSitterData>,
196 pub capability: Capability,
197}
198
199/// The kind and amount of indentation in a particular line. For now,
200/// assumes that indentation is all the same character.
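///
/// For example, `IndentSize { len: 4, kind: IndentKind::Space }` describes an
/// indent of four spaces, while `IndentSize { len: 1, kind: IndentKind::Tab }`
/// describes a single tab.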
201#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
202pub struct IndentSize {
203 /// The number of bytes that comprise the indentation.
204 pub len: u32,
205 /// The kind of whitespace used for indentation.
206 pub kind: IndentKind,
207}
208
209/// A whitespace character that's used for indentation.
210#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
211pub enum IndentKind {
212 /// An ASCII space character.
213 #[default]
214 Space,
215 /// An ASCII tab character.
216 Tab,
217}
218
219/// The shape of a selection cursor.
220#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
221pub enum CursorShape {
222 /// A vertical bar
223 #[default]
224 Bar,
225 /// A block that surrounds the following character
226 Block,
227 /// An underline that runs along the following character
228 Underline,
229 /// A box drawn around the following character
230 Hollow,
231}
232
233impl From<settings::CursorShape> for CursorShape {
234 fn from(shape: settings::CursorShape) -> Self {
235 match shape {
236 settings::CursorShape::Bar => CursorShape::Bar,
237 settings::CursorShape::Block => CursorShape::Block,
238 settings::CursorShape::Underline => CursorShape::Underline,
239 settings::CursorShape::Hollow => CursorShape::Hollow,
240 }
241 }
242}
243
244#[derive(Clone, Debug)]
245struct SelectionSet {
246 line_mode: bool,
247 cursor_shape: CursorShape,
248 selections: Arc<[Selection<Anchor>]>,
249 lamport_timestamp: clock::Lamport,
250}
251
252/// A diagnostic associated with a certain range of a buffer.
253#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
254pub struct Diagnostic {
255 /// The name of the service that produced this diagnostic.
256 pub source: Option<String>,
257 /// The ID provided by the dynamic registration that produced this diagnostic.
258 pub registration_id: Option<SharedString>,
259 /// A machine-readable code that identifies this diagnostic.
260 pub code: Option<NumberOrString>,
261 pub code_description: Option<lsp::Uri>,
262 /// Whether this diagnostic is a hint, warning, or error.
263 pub severity: DiagnosticSeverity,
264 /// The human-readable message associated with this diagnostic.
265 pub message: String,
    /// The human-readable message in Markdown format, if available.
267 pub markdown: Option<String>,
268 /// An id that identifies the group to which this diagnostic belongs.
269 ///
270 /// When a language server produces a diagnostic with
271 /// one or more associated diagnostics, those diagnostics are all
272 /// assigned a single group ID.
273 pub group_id: usize,
274 /// Whether this diagnostic is the primary diagnostic for its group.
275 ///
276 /// In a given group, the primary diagnostic is the top-level diagnostic
277 /// returned by the language server. The non-primary diagnostics are the
278 /// associated diagnostics.
279 pub is_primary: bool,
280 /// Whether this diagnostic is considered to originate from an analysis of
281 /// files on disk, as opposed to any unsaved buffer contents. This is a
282 /// property of a given diagnostic source, and is configured for a given
283 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
284 /// for the language server.
285 pub is_disk_based: bool,
286 /// Whether this diagnostic marks unnecessary code.
287 pub is_unnecessary: bool,
    /// Allows quick separation of diagnostic groups based on their source.
289 pub source_kind: DiagnosticSourceKind,
    /// Data from the language server that produced this diagnostic. Passed back to
    /// the server when code actions are requested for this diagnostic.
291 pub data: Option<Value>,
292 /// Whether to underline the corresponding text range in the editor.
293 pub underline: bool,
294}
295
296#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
297pub enum DiagnosticSourceKind {
298 Pulled,
299 Pushed,
300 Other,
301}
302
303/// An operation used to synchronize this buffer with its other replicas.
304#[derive(Clone, Debug, PartialEq)]
305pub enum Operation {
306 /// A text operation.
307 Buffer(text::Operation),
308
309 /// An update to the buffer's diagnostics.
310 UpdateDiagnostics {
311 /// The id of the language server that produced the new diagnostics.
312 server_id: LanguageServerId,
313 /// The diagnostics.
314 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
315 /// The buffer's lamport timestamp.
316 lamport_timestamp: clock::Lamport,
317 },
318
319 /// An update to the most recent selections in this buffer.
320 UpdateSelections {
321 /// The selections.
322 selections: Arc<[Selection<Anchor>]>,
323 /// The buffer's lamport timestamp.
324 lamport_timestamp: clock::Lamport,
325 /// Whether the selections are in 'line mode'.
326 line_mode: bool,
327 /// The [`CursorShape`] associated with these selections.
328 cursor_shape: CursorShape,
329 },
330
331 /// An update to the characters that should trigger autocompletion
332 /// for this buffer.
333 UpdateCompletionTriggers {
334 /// The characters that trigger autocompletion.
335 triggers: Vec<String>,
336 /// The buffer's lamport timestamp.
337 lamport_timestamp: clock::Lamport,
338 /// The language server ID.
339 server_id: LanguageServerId,
340 },
341
342 /// An update to the line ending type of this buffer.
343 UpdateLineEnding {
344 /// The line ending type.
345 line_ending: LineEnding,
346 /// The buffer's lamport timestamp.
347 lamport_timestamp: clock::Lamport,
348 },
349}
350
351/// An event that occurs in a buffer.
352#[derive(Clone, Debug, PartialEq)]
353pub enum BufferEvent {
354 /// The buffer was changed in a way that must be
355 /// propagated to its other replicas.
356 Operation {
357 operation: Operation,
358 is_local: bool,
359 },
360 /// The buffer was edited.
361 Edited,
362 /// The buffer's `dirty` bit changed.
363 DirtyChanged,
364 /// The buffer was saved.
365 Saved,
366 /// The buffer's file was changed on disk.
367 FileHandleChanged,
368 /// The buffer was reloaded.
369 Reloaded,
    /// The buffer needs to be reloaded.
371 ReloadNeeded,
372 /// The buffer's language was changed.
    /// The boolean is `true` if the buffer had no effective language before
    /// (either none or plain text), but does now.
374 LanguageChanged(bool),
375 /// The buffer's syntax trees were updated.
376 Reparsed,
377 /// The buffer's diagnostics were updated.
378 DiagnosticsUpdated,
379 /// The buffer gained or lost editing capabilities.
380 CapabilityChanged,
381}
382
383/// The file associated with a buffer.
384pub trait File: Send + Sync + Any {
385 /// Returns the [`LocalFile`] associated with this file, if the
386 /// file is local.
387 fn as_local(&self) -> Option<&dyn LocalFile>;
388
389 /// Returns whether this file is local.
390 fn is_local(&self) -> bool {
391 self.as_local().is_some()
392 }
393
394 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
395 /// only available in some states, such as modification time.
396 fn disk_state(&self) -> DiskState;
397
398 /// Returns the path of this file relative to the worktree's root directory.
399 fn path(&self) -> &Arc<RelPath>;
400
401 /// Returns the path of this file relative to the worktree's parent directory (this means it
402 /// includes the name of the worktree's root folder).
403 fn full_path(&self, cx: &App) -> PathBuf;
404
405 /// Returns the path style of this file.
406 fn path_style(&self, cx: &App) -> PathStyle;
407
408 /// Returns the last component of this handle's absolute path. If this handle refers to the root
409 /// of its worktree, then this method will return the name of the worktree itself.
410 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
411
412 /// Returns the id of the worktree to which this file belongs.
413 ///
414 /// This is needed for looking up project-specific settings.
415 fn worktree_id(&self, cx: &App) -> WorktreeId;
416
417 /// Converts this file into a protobuf message.
418 fn to_proto(&self, cx: &App) -> rpc::proto::File;
419
    /// Returns whether Zed considers this to be a private file.
421 fn is_private(&self) -> bool;
422
423 fn can_open(&self) -> bool {
424 !self.is_local()
425 }
426}
427
428/// The file's storage status - whether it's stored (`Present`), and if so when it was last
429/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
430/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
431/// indicator for new files.
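///
/// A minimal illustration of the state queries defined below (not run as a doctest):
/// ```ignore
/// assert!(DiskState::New.mtime().is_none());
/// assert!(!DiskState::New.exists());
/// assert!(DiskState::Deleted.is_deleted());
/// ```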
432#[derive(Copy, Clone, Debug, PartialEq)]
433pub enum DiskState {
434 /// File created in Zed that has not been saved.
435 New,
436 /// File present on the filesystem.
437 Present { mtime: MTime },
438 /// Deleted file that was previously present.
439 Deleted,
    /// An old version of a file that was previously present,
    /// usually from a version control system (e.g. a git blob).
442 Historic { was_deleted: bool },
443}
444
445impl DiskState {
446 /// Returns the file's last known modification time on disk.
447 pub fn mtime(self) -> Option<MTime> {
448 match self {
449 DiskState::New => None,
450 DiskState::Present { mtime } => Some(mtime),
451 DiskState::Deleted => None,
452 DiskState::Historic { .. } => None,
453 }
454 }
455
456 pub fn exists(&self) -> bool {
457 match self {
458 DiskState::New => false,
459 DiskState::Present { .. } => true,
460 DiskState::Deleted => false,
461 DiskState::Historic { .. } => false,
462 }
463 }
464
465 /// Returns true if this state represents a deleted file.
466 pub fn is_deleted(&self) -> bool {
467 match self {
468 DiskState::Deleted => true,
469 DiskState::Historic { was_deleted } => *was_deleted,
470 _ => false,
471 }
472 }
473}
474
475/// The file associated with a buffer, in the case where the file is on the local disk.
476pub trait LocalFile: File {
477 /// Returns the absolute path of this file
478 fn abs_path(&self, cx: &App) -> PathBuf;
479
480 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
481 fn load(&self, cx: &App) -> Task<Result<String>>;
482
483 /// Loads the file's contents from disk.
484 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
485}
486
487/// The auto-indent behavior associated with an editing operation.
488/// For some editing operations, each affected line of text has its
489/// indentation recomputed. For other operations, the entire block
490/// of edited text is adjusted uniformly.
491#[derive(Clone, Debug)]
492pub enum AutoindentMode {
493 /// Indent each line of inserted text.
494 EachLine,
495 /// Apply the same indentation adjustment to all of the lines
496 /// in a given insertion.
497 Block {
498 /// The original indentation column of the first line of each
499 /// insertion, if it has been copied.
500 ///
501 /// Knowing this makes it possible to preserve the relative indentation
502 /// of every line in the insertion from when it was copied.
503 ///
    /// If the original indent column is `a`, and the first line of the
    /// insertion is auto-indented to column `b`, then every other line of
    /// the insertion has its indentation adjusted by `b - a`.
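        ///
        /// For example, if text was copied from column 8 (`a = 8`) and its first
        /// line is auto-indented to column 4 (`b = 4`), every line of the
        /// insertion is shifted left by 4 columns.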
507 original_indent_columns: Vec<Option<u32>>,
508 },
509}
510
511#[derive(Clone)]
512struct AutoindentRequest {
513 before_edit: BufferSnapshot,
514 entries: Vec<AutoindentRequestEntry>,
515 is_block_mode: bool,
516 ignore_empty_lines: bool,
517}
518
519#[derive(Debug, Clone)]
520struct AutoindentRequestEntry {
521 /// A range of the buffer whose indentation should be adjusted.
522 range: Range<Anchor>,
    /// The row at which the edit started, in the buffer as it was before the edit
    /// was applied. This is stored here because the anchors in `range` were created
    /// after the edit, so they cannot be resolved against the `before_edit` snapshot.
526 old_row: Option<u32>,
527 indent_size: IndentSize,
528 original_indent_column: Option<u32>,
529}
530
531#[derive(Debug)]
532struct IndentSuggestion {
533 basis_row: u32,
534 delta: Ordering,
535 within_error: bool,
536}
537
538struct BufferChunkHighlights<'a> {
539 captures: SyntaxMapCaptures<'a>,
540 next_capture: Option<SyntaxMapCapture<'a>>,
541 stack: Vec<(usize, HighlightId)>,
542 highlight_maps: Vec<HighlightMap>,
543}
544
545/// An iterator that yields chunks of a buffer's text, along with their
546/// syntax highlights and diagnostic status.
547pub struct BufferChunks<'a> {
548 buffer_snapshot: Option<&'a BufferSnapshot>,
549 range: Range<usize>,
550 chunks: text::Chunks<'a>,
551 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
552 error_depth: usize,
553 warning_depth: usize,
554 information_depth: usize,
555 hint_depth: usize,
556 unnecessary_depth: usize,
557 underline: bool,
558 highlights: Option<BufferChunkHighlights<'a>>,
559}
560
561/// A chunk of a buffer's text, along with its syntax highlight and
562/// diagnostic status.
563#[derive(Clone, Debug, Default)]
564pub struct Chunk<'a> {
565 /// The text of the chunk.
566 pub text: &'a str,
567 /// The syntax highlighting style of the chunk.
568 pub syntax_highlight_id: Option<HighlightId>,
569 /// The highlight style that has been applied to this chunk in
570 /// the editor.
571 pub highlight_style: Option<HighlightStyle>,
572 /// The severity of diagnostic associated with this chunk, if any.
573 pub diagnostic_severity: Option<DiagnosticSeverity>,
    /// A bitset of which characters in this chunk are tabs.
575 pub tabs: u128,
    /// A bitmap of the character indices in this chunk.
577 pub chars: u128,
578 /// Whether this chunk of text is marked as unnecessary.
579 pub is_unnecessary: bool,
580 /// Whether this chunk of text was originally a tab character.
581 pub is_tab: bool,
582 /// Whether this chunk of text was originally an inlay.
583 pub is_inlay: bool,
584 /// Whether to underline the corresponding text range in the editor.
585 pub underline: bool,
586}
587
588/// A set of edits to a given version of a buffer, computed asynchronously.
589#[derive(Debug, Clone)]
590pub struct Diff {
591 pub base_version: clock::Global,
592 pub line_ending: LineEnding,
593 pub edits: Vec<(Range<usize>, Arc<str>)>,
594}
595
596#[derive(Debug, Clone, Copy)]
597pub(crate) struct DiagnosticEndpoint {
598 offset: usize,
599 is_start: bool,
600 underline: bool,
601 severity: DiagnosticSeverity,
602 is_unnecessary: bool,
603}
604
605/// A class of characters, used for characterizing a run of text.
606#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
607pub enum CharKind {
608 /// Whitespace.
609 Whitespace,
610 /// Punctuation.
611 Punctuation,
612 /// Word.
613 Word,
614}
615
616/// Context for character classification within a specific scope.
617#[derive(Copy, Clone, Eq, PartialEq, Debug)]
618pub enum CharScopeContext {
619 /// Character classification for completion queries.
620 ///
621 /// This context treats certain characters as word constituents that would
622 /// normally be considered punctuation, such as '-' in Tailwind classes
623 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
624 Completion,
625 /// Character classification for linked edits.
626 ///
627 /// This context handles characters that should be treated as part of
628 /// identifiers during linked editing operations, such as '.' in JSX
629 /// component names like `<Animated.View>`.
630 LinkedEdit,
631}
632
/// A runnable is a set of data about a buffer region that can be resolved into a task.
634pub struct Runnable {
635 pub tags: SmallVec<[RunnableTag; 1]>,
636 pub language: Arc<Language>,
637 pub buffer: BufferId,
638}
639
640#[derive(Default, Clone, Debug)]
641pub struct HighlightedText {
642 pub text: SharedString,
643 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
644}
645
646#[derive(Default, Debug)]
647struct HighlightedTextBuilder {
648 pub text: String,
649 highlights: Vec<(Range<usize>, HighlightStyle)>,
650}
651
652impl HighlightedText {
653 pub fn from_buffer_range<T: ToOffset>(
654 range: Range<T>,
655 snapshot: &text::BufferSnapshot,
656 syntax_snapshot: &SyntaxSnapshot,
657 override_style: Option<HighlightStyle>,
658 syntax_theme: &SyntaxTheme,
659 ) -> Self {
660 let mut highlighted_text = HighlightedTextBuilder::default();
661 highlighted_text.add_text_from_buffer_range(
662 range,
663 snapshot,
664 syntax_snapshot,
665 override_style,
666 syntax_theme,
667 );
668 highlighted_text.build()
669 }
670
671 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
672 gpui::StyledText::new(self.text.clone())
673 .with_default_highlights(default_style, self.highlights.iter().cloned())
674 }
675
    /// Returns the first line, with leading whitespace trimmed unless a highlight
    /// starts within that whitespace, and a boolean indicating whether more lines
    /// follow.
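    ///
    /// A rough sketch (not run as a doctest; `highlighted_text` is an arbitrary
    /// `HighlightedText`):
    /// ```ignore
    /// let (preview, has_more_lines) = highlighted_text.first_line_preview();
    /// ```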
678 pub fn first_line_preview(self) -> (Self, bool) {
679 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
680 let first_line = &self.text[..newline_ix];
681
682 // Trim leading whitespace, unless an edit starts prior to it.
683 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
684 if let Some((first_highlight_range, _)) = self.highlights.first() {
685 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
686 }
687
688 let preview_text = &first_line[preview_start_ix..];
689 let preview_highlights = self
690 .highlights
691 .into_iter()
692 .skip_while(|(range, _)| range.end <= preview_start_ix)
693 .take_while(|(range, _)| range.start < newline_ix)
694 .filter_map(|(mut range, highlight)| {
695 range.start = range.start.saturating_sub(preview_start_ix);
696 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
697 if range.is_empty() {
698 None
699 } else {
700 Some((range, highlight))
701 }
702 });
703
704 let preview = Self {
705 text: SharedString::new(preview_text),
706 highlights: preview_highlights.collect(),
707 };
708
709 (preview, self.text.len() > newline_ix)
710 }
711}
712
713impl HighlightedTextBuilder {
714 pub fn build(self) -> HighlightedText {
715 HighlightedText {
716 text: self.text.into(),
717 highlights: self.highlights,
718 }
719 }
720
721 pub fn add_text_from_buffer_range<T: ToOffset>(
722 &mut self,
723 range: Range<T>,
724 snapshot: &text::BufferSnapshot,
725 syntax_snapshot: &SyntaxSnapshot,
726 override_style: Option<HighlightStyle>,
727 syntax_theme: &SyntaxTheme,
728 ) {
729 let range = range.to_offset(snapshot);
730 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
731 let start = self.text.len();
732 self.text.push_str(chunk.text);
733 let end = self.text.len();
734
735 if let Some(highlight_style) = chunk
736 .syntax_highlight_id
737 .and_then(|id| id.style(syntax_theme))
738 {
739 let highlight_style = override_style.map_or(highlight_style, |override_style| {
740 highlight_style.highlight(override_style)
741 });
742 self.highlights.push((start..end, highlight_style));
743 } else if let Some(override_style) = override_style {
744 self.highlights.push((start..end, override_style));
745 }
746 }
747 }
748
749 fn highlighted_chunks<'a>(
750 range: Range<usize>,
751 snapshot: &'a text::BufferSnapshot,
752 syntax_snapshot: &'a SyntaxSnapshot,
753 ) -> BufferChunks<'a> {
754 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
755 grammar
756 .highlights_config
757 .as_ref()
758 .map(|config| &config.query)
759 });
760
761 let highlight_maps = captures
762 .grammars()
763 .iter()
764 .map(|grammar| grammar.highlight_map())
765 .collect();
766
767 BufferChunks::new(
768 snapshot.as_rope(),
769 range,
770 Some((captures, highlight_maps)),
771 false,
772 None,
773 )
774 }
775}
776
777#[derive(Clone)]
778pub struct EditPreview {
779 old_snapshot: text::BufferSnapshot,
780 applied_edits_snapshot: text::BufferSnapshot,
781 syntax_snapshot: SyntaxSnapshot,
782}
783
784impl EditPreview {
785 pub fn as_unified_diff(
786 &self,
787 file: Option<&Arc<dyn File>>,
788 edits: &[(Range<Anchor>, impl AsRef<str>)],
789 ) -> Option<String> {
790 let (first, _) = edits.first()?;
791 let (last, _) = edits.last()?;
792
793 let start = first.start.to_point(&self.old_snapshot);
794 let old_end = last.end.to_point(&self.old_snapshot);
795 let new_end = last
796 .end
797 .bias_right(&self.old_snapshot)
798 .to_point(&self.applied_edits_snapshot);
799
800 let start = Point::new(start.row.saturating_sub(3), 0);
801 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
802 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
803
804 let diff_body = unified_diff_with_offsets(
805 &self
806 .old_snapshot
807 .text_for_range(start..old_end)
808 .collect::<String>(),
809 &self
810 .applied_edits_snapshot
811 .text_for_range(start..new_end)
812 .collect::<String>(),
813 start.row,
814 start.row,
815 );
816
817 let path = file.map(|f| f.path().as_unix_str());
818 let header = match path {
819 Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
820 None => String::new(),
821 };
822
823 Some(format!("{}{}", header, diff_body))
824 }
825
826 pub fn highlight_edits(
827 &self,
828 current_snapshot: &BufferSnapshot,
829 edits: &[(Range<Anchor>, impl AsRef<str>)],
830 include_deletions: bool,
831 cx: &App,
832 ) -> HighlightedText {
833 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
834 return HighlightedText::default();
835 };
836
837 let mut highlighted_text = HighlightedTextBuilder::default();
838
839 let visible_range_in_preview_snapshot =
840 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
841 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
842
843 let insertion_highlight_style = HighlightStyle {
844 background_color: Some(cx.theme().status().created_background),
845 ..Default::default()
846 };
847 let deletion_highlight_style = HighlightStyle {
848 background_color: Some(cx.theme().status().deleted_background),
849 ..Default::default()
850 };
851 let syntax_theme = cx.theme().syntax();
852
853 for (range, edit_text) in edits {
854 let edit_new_end_in_preview_snapshot = range
855 .end
856 .bias_right(&self.old_snapshot)
857 .to_offset(&self.applied_edits_snapshot);
858 let edit_start_in_preview_snapshot =
859 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
860
861 let unchanged_range_in_preview_snapshot =
862 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
863 if !unchanged_range_in_preview_snapshot.is_empty() {
864 highlighted_text.add_text_from_buffer_range(
865 unchanged_range_in_preview_snapshot,
866 &self.applied_edits_snapshot,
867 &self.syntax_snapshot,
868 None,
869 syntax_theme,
870 );
871 }
872
873 let range_in_current_snapshot = range.to_offset(current_snapshot);
874 if include_deletions && !range_in_current_snapshot.is_empty() {
875 highlighted_text.add_text_from_buffer_range(
876 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
879 Some(deletion_highlight_style),
880 syntax_theme,
881 );
882 }
883
884 if !edit_text.as_ref().is_empty() {
885 highlighted_text.add_text_from_buffer_range(
886 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
887 &self.applied_edits_snapshot,
888 &self.syntax_snapshot,
889 Some(insertion_highlight_style),
890 syntax_theme,
891 );
892 }
893
894 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
895 }
896
897 highlighted_text.add_text_from_buffer_range(
898 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
899 &self.applied_edits_snapshot,
900 &self.syntax_snapshot,
901 None,
902 syntax_theme,
903 );
904
905 highlighted_text.build()
906 }
907
908 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
909 cx.new(|cx| {
910 let mut buffer = Buffer::local_normalized(
911 self.applied_edits_snapshot.as_rope().clone(),
912 self.applied_edits_snapshot.line_ending(),
913 cx,
914 );
915 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
916 buffer
917 })
918 }
919
920 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
921 let (first, _) = edits.first()?;
922 let (last, _) = edits.last()?;
923
924 let start = first
925 .start
926 .bias_left(&self.old_snapshot)
927 .to_point(&self.applied_edits_snapshot);
928 let end = last
929 .end
930 .bias_right(&self.old_snapshot)
931 .to_point(&self.applied_edits_snapshot);
932
933 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
934 let range = Point::new(start.row, 0)
935 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
936
937 Some(range)
938 }
939}
940
941#[derive(Clone, Debug, PartialEq, Eq)]
942pub struct BracketMatch<T> {
943 pub open_range: Range<T>,
944 pub close_range: Range<T>,
945 pub newline_only: bool,
946 pub syntax_layer_depth: usize,
947 pub color_index: Option<usize>,
948}
949
950impl<T> BracketMatch<T> {
951 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
952 (self.open_range, self.close_range)
953 }
954}
955
956impl Buffer {
957 /// Create a new buffer with the given base text.
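    ///
    /// A rough usage sketch (not run as a doctest; assumes a gpui `cx: &mut App`):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("Hello, world!", cx));
    /// ```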
958 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
959 Self::build(
960 TextBuffer::new(
961 ReplicaId::LOCAL,
962 cx.entity_id().as_non_zero_u64().into(),
963 base_text.into(),
964 ),
965 None,
966 Capability::ReadWrite,
967 )
968 }
969
970 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
971 pub fn local_normalized(
972 base_text_normalized: Rope,
973 line_ending: LineEnding,
974 cx: &Context<Self>,
975 ) -> Self {
976 Self::build(
977 TextBuffer::new_normalized(
978 ReplicaId::LOCAL,
979 cx.entity_id().as_non_zero_u64().into(),
980 line_ending,
981 base_text_normalized,
982 ),
983 None,
984 Capability::ReadWrite,
985 )
986 }
987
988 /// Create a new buffer that is a replica of a remote buffer.
989 pub fn remote(
990 remote_id: BufferId,
991 replica_id: ReplicaId,
992 capability: Capability,
993 base_text: impl Into<String>,
994 ) -> Self {
995 Self::build(
996 TextBuffer::new(replica_id, remote_id, base_text.into()),
997 None,
998 capability,
999 )
1000 }
1001
1002 /// Create a new buffer that is a replica of a remote buffer, populating its
1003 /// state from the given protobuf message.
1004 pub fn from_proto(
1005 replica_id: ReplicaId,
1006 capability: Capability,
1007 message: proto::BufferState,
1008 file: Option<Arc<dyn File>>,
1009 ) -> Result<Self> {
1010 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
1011 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
1012 let mut this = Self::build(buffer, file, capability);
1013 this.text.set_line_ending(proto::deserialize_line_ending(
1014 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
1015 ));
1016 this.saved_version = proto::deserialize_version(&message.saved_version);
1017 this.saved_mtime = message.saved_mtime.map(|time| time.into());
1018 Ok(this)
1019 }
1020
1021 /// Serialize the buffer's state to a protobuf message.
1022 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
1023 proto::BufferState {
1024 id: self.remote_id().into(),
1025 file: self.file.as_ref().map(|f| f.to_proto(cx)),
1026 base_text: self.base_text().to_string(),
1027 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1028 saved_version: proto::serialize_version(&self.saved_version),
1029 saved_mtime: self.saved_mtime.map(|time| time.into()),
1030 }
1031 }
1032
1033 /// Serialize as protobufs all of the changes to the buffer since the given version.
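    ///
    /// A rough sketch of how this pairs with [`Buffer::to_proto`] and
    /// [`Buffer::from_proto`] when replicating a buffer (not run as a doctest;
    /// `buffer`, `replica_id`, and `cx` are placeholders):
    /// ```ignore
    /// let state = buffer.read(cx).to_proto(cx);
    /// let ops = buffer.read(cx).serialize_ops(None, cx);
    /// // On the remote side, rebuild the replica from the serialized state,
    /// // then apply the deserialized operations to it.
    /// let replica = Buffer::from_proto(replica_id, Capability::ReadWrite, state, None)?;
    /// ```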
1034 pub fn serialize_ops(
1035 &self,
1036 since: Option<clock::Global>,
1037 cx: &App,
1038 ) -> Task<Vec<proto::Operation>> {
1039 let mut operations = Vec::new();
1040 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1041
1042 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1043 proto::serialize_operation(&Operation::UpdateSelections {
1044 selections: set.selections.clone(),
1045 lamport_timestamp: set.lamport_timestamp,
1046 line_mode: set.line_mode,
1047 cursor_shape: set.cursor_shape,
1048 })
1049 }));
1050
1051 for (server_id, diagnostics) in &self.diagnostics {
1052 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1053 lamport_timestamp: self.diagnostics_timestamp,
1054 server_id: *server_id,
1055 diagnostics: diagnostics.iter().cloned().collect(),
1056 }));
1057 }
1058
1059 for (server_id, completions) in &self.completion_triggers_per_language_server {
1060 operations.push(proto::serialize_operation(
1061 &Operation::UpdateCompletionTriggers {
1062 triggers: completions.iter().cloned().collect(),
1063 lamport_timestamp: self.completion_triggers_timestamp,
1064 server_id: *server_id,
1065 },
1066 ));
1067 }
1068
1069 let text_operations = self.text.operations().clone();
1070 cx.background_spawn(async move {
1071 let since = since.unwrap_or_default();
1072 operations.extend(
1073 text_operations
1074 .iter()
1075 .filter(|(_, op)| !since.observed(op.timestamp()))
1076 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1077 );
1078 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1079 operations
1080 })
1081 }
1082
1083 /// Assign a language to the buffer, returning the buffer.
1084 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1085 self.set_language_async(Some(language), cx);
1086 self
1087 }
1088
1089 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
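    ///
    /// A rough builder-style sketch (not run as a doctest; `rust_language` is a
    /// placeholder `Arc<Language>`):
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}", cx).with_language(rust_language, cx));
    /// ```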
1090 #[ztracing::instrument(skip_all, fields(lang = language.config.name.0.as_str()))]
1091 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1092 self.set_language(Some(language), cx);
1093 self
1094 }
1095
1096 /// Returns the [`Capability`] of this buffer.
1097 pub fn capability(&self) -> Capability {
1098 self.capability
1099 }
1100
1101 /// Whether this buffer can only be read.
1102 pub fn read_only(&self) -> bool {
1103 !self.capability.editable()
1104 }
1105
    /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], [`File`], and [`Capability`].
1107 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1108 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1109 let snapshot = buffer.snapshot();
1110 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1111 let tree_sitter_data = TreeSitterData::new(snapshot);
1112 Self {
1113 saved_mtime,
1114 tree_sitter_data: Arc::new(tree_sitter_data),
1115 saved_version: buffer.version(),
1116 preview_version: buffer.version(),
1117 reload_task: None,
1118 transaction_depth: 0,
1119 was_dirty_before_starting_transaction: None,
1120 has_unsaved_edits: Cell::new((buffer.version(), false)),
1121 text: buffer,
1122 branch_state: None,
1123 file,
1124 capability,
1125 syntax_map,
1126 reparse: None,
1127 non_text_state_update_count: 0,
1128 sync_parse_timeout: if cfg!(any(test, feature = "test-support")) {
1129 Some(Duration::from_millis(10))
1130 } else {
1131 Some(Duration::from_millis(1))
1132 },
1133 parse_status: watch::channel(ParseStatus::Idle),
1134 autoindent_requests: Default::default(),
1135 wait_for_autoindent_txs: Default::default(),
1136 pending_autoindent: Default::default(),
1137 language: None,
1138 remote_selections: Default::default(),
1139 diagnostics: Default::default(),
1140 diagnostics_timestamp: Lamport::MIN,
1141 completion_triggers: Default::default(),
1142 completion_triggers_per_language_server: Default::default(),
1143 completion_triggers_timestamp: Lamport::MIN,
1144 deferred_ops: OperationQueue::new(),
1145 has_conflict: false,
1146 change_bits: Default::default(),
1147 _subscriptions: Vec::new(),
1148 encoding: encoding_rs::UTF_8,
1149 has_bom: false,
1150 }
1151 }
1152
1153 pub fn build_snapshot(
1154 text: Rope,
1155 language: Option<Arc<Language>>,
1156 language_registry: Option<Arc<LanguageRegistry>>,
1157 cx: &mut App,
1158 ) -> impl Future<Output = BufferSnapshot> + use<> {
1159 let entity_id = cx.reserve_entity::<Self>().entity_id();
1160 let buffer_id = entity_id.as_non_zero_u64().into();
1161 async move {
1162 let text =
1163 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1164 .snapshot();
1165 let mut syntax = SyntaxMap::new(&text).snapshot();
1166 if let Some(language) = language.clone() {
1167 let language_registry = language_registry.clone();
1168 syntax.reparse(&text, language_registry, language);
1169 }
1170 let tree_sitter_data = TreeSitterData::new(text.clone());
1171 BufferSnapshot {
1172 text,
1173 syntax,
1174 file: None,
1175 diagnostics: Default::default(),
1176 remote_selections: Default::default(),
1177 tree_sitter_data: Arc::new(tree_sitter_data),
1178 language,
1179 non_text_state_update_count: 0,
1180 capability: Capability::ReadOnly,
1181 }
1182 }
1183 }
1184
1185 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1186 let entity_id = cx.reserve_entity::<Self>().entity_id();
1187 let buffer_id = entity_id.as_non_zero_u64().into();
1188 let text = TextBuffer::new_normalized(
1189 ReplicaId::LOCAL,
1190 buffer_id,
1191 Default::default(),
1192 Rope::new(),
1193 )
1194 .snapshot();
1195 let syntax = SyntaxMap::new(&text).snapshot();
1196 let tree_sitter_data = TreeSitterData::new(text.clone());
1197 BufferSnapshot {
1198 text,
1199 syntax,
1200 tree_sitter_data: Arc::new(tree_sitter_data),
1201 file: None,
1202 diagnostics: Default::default(),
1203 remote_selections: Default::default(),
1204 language: None,
1205 non_text_state_update_count: 0,
1206 capability: Capability::ReadOnly,
1207 }
1208 }
1209
1210 #[cfg(any(test, feature = "test-support"))]
1211 pub fn build_snapshot_sync(
1212 text: Rope,
1213 language: Option<Arc<Language>>,
1214 language_registry: Option<Arc<LanguageRegistry>>,
1215 cx: &mut App,
1216 ) -> BufferSnapshot {
1217 let entity_id = cx.reserve_entity::<Self>().entity_id();
1218 let buffer_id = entity_id.as_non_zero_u64().into();
1219 let text =
1220 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1221 .snapshot();
1222 let mut syntax = SyntaxMap::new(&text).snapshot();
1223 if let Some(language) = language.clone() {
1224 syntax.reparse(&text, language_registry, language);
1225 }
1226 let tree_sitter_data = TreeSitterData::new(text.clone());
1227 BufferSnapshot {
1228 text,
1229 syntax,
1230 tree_sitter_data: Arc::new(tree_sitter_data),
1231 file: None,
1232 diagnostics: Default::default(),
1233 remote_selections: Default::default(),
1234 language,
1235 non_text_state_update_count: 0,
1236 capability: Capability::ReadOnly,
1237 }
1238 }
1239
1240 /// Retrieve a snapshot of the buffer's current state. This is computationally
1241 /// cheap, and allows reading from the buffer on a background thread.
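    ///
    /// A rough sketch (not run as a doctest; assumes `buffer: Entity<Buffer>` and a
    /// gpui `cx`): the snapshot can be moved to a background task while the buffer
    /// continues to be edited on the main thread.
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// cx.background_spawn(async move {
    ///     // Read-only queries against `snapshot` are safe here.
    ///     let _max_point = snapshot.text.max_point();
    /// })
    /// .detach();
    /// ```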
1242 pub fn snapshot(&self) -> BufferSnapshot {
1243 let text = self.text.snapshot();
1244 let mut syntax_map = self.syntax_map.lock();
1245 syntax_map.interpolate(&text);
1246 let syntax = syntax_map.snapshot();
1247
1248 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1249 Arc::new(TreeSitterData::new(text.clone()))
1250 } else {
1251 self.tree_sitter_data.clone()
1252 };
1253
1254 BufferSnapshot {
1255 text,
1256 syntax,
1257 tree_sitter_data,
1258 file: self.file.clone(),
1259 remote_selections: self.remote_selections.clone(),
1260 diagnostics: self.diagnostics.clone(),
1261 language: self.language.clone(),
1262 non_text_state_update_count: self.non_text_state_update_count,
1263 capability: self.capability,
1264 }
1265 }
1266
1267 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1268 let this = cx.entity();
1269 cx.new(|cx| {
1270 let mut branch = Self {
1271 branch_state: Some(BufferBranchState {
1272 base_buffer: this.clone(),
1273 merged_operations: Default::default(),
1274 }),
1275 language: self.language.clone(),
1276 has_conflict: self.has_conflict,
1277 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1278 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1279 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1280 };
1281 if let Some(language_registry) = self.language_registry() {
1282 branch.set_language_registry(language_registry);
1283 }
1284
1285 // Reparse the branch buffer so that we get syntax highlighting immediately.
1286 branch.reparse(cx, true);
1287
1288 branch
1289 })
1290 }
1291
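    /// Computes an [`EditPreview`] for the given edits on a background thread,
    /// without modifying this buffer.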
1292 pub fn preview_edits(
1293 &self,
1294 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1295 cx: &App,
1296 ) -> Task<EditPreview> {
1297 let registry = self.language_registry();
1298 let language = self.language().cloned();
1299 let old_snapshot = self.text.snapshot();
1300 let mut branch_buffer = self.text.branch();
1301 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1302 cx.background_spawn(async move {
1303 if !edits.is_empty() {
1304 if let Some(language) = language.clone() {
1305 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1306 }
1307
1308 branch_buffer.edit(edits.iter().cloned());
1309 let snapshot = branch_buffer.snapshot();
1310 syntax_snapshot.interpolate(&snapshot);
1311
1312 if let Some(language) = language {
1313 syntax_snapshot.reparse(&snapshot, registry, language);
1314 }
1315 }
1316 EditPreview {
1317 old_snapshot,
1318 applied_edits_snapshot: branch_buffer.snapshot(),
1319 syntax_snapshot,
1320 }
1321 })
1322 }
1323
1324 /// Applies all of the changes in this buffer that intersect any of the
1325 /// given `ranges` to its base buffer.
1326 ///
1327 /// If `ranges` is empty, then all changes will be applied. This buffer must
1328 /// be a branch buffer to call this method.
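    ///
    /// A rough sketch of the intended flow (not run as a doctest; `base` is an
    /// existing `Entity<Buffer>`):
    /// ```ignore
    /// let branch = base.update(cx, |base, cx| base.branch(cx));
    /// branch.update(cx, |branch, cx| {
    ///     branch.edit([(0..0, "inserted text")], None, cx);
    ///     // An empty `ranges` vector merges every change back into `base`.
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```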
1329 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1330 let Some(base_buffer) = self.base_buffer() else {
1331 debug_panic!("not a branch buffer");
1332 return;
1333 };
1334
1335 let mut ranges = if ranges.is_empty() {
1336 &[0..usize::MAX]
1337 } else {
1338 ranges.as_slice()
1339 }
1340 .iter()
1341 .peekable();
1342
1343 let mut edits = Vec::new();
1344 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1345 let mut is_included = false;
1346 while let Some(range) = ranges.peek() {
1347 if range.end < edit.new.start {
1348 ranges.next().unwrap();
1349 } else {
1350 if range.start <= edit.new.end {
1351 is_included = true;
1352 }
1353 break;
1354 }
1355 }
1356
1357 if is_included {
1358 edits.push((
1359 edit.old.clone(),
1360 self.text_for_range(edit.new.clone()).collect::<String>(),
1361 ));
1362 }
1363 }
1364
1365 let operation = base_buffer.update(cx, |base_buffer, cx| {
1366 // cx.emit(BufferEvent::DiffBaseChanged);
1367 base_buffer.edit(edits, None, cx)
1368 });
1369
1370 if let Some(operation) = operation
1371 && let Some(BufferBranchState {
1372 merged_operations, ..
1373 }) = &mut self.branch_state
1374 {
1375 merged_operations.push(operation);
1376 }
1377 }
1378
1379 fn on_base_buffer_event(
1380 &mut self,
1381 _: Entity<Buffer>,
1382 event: &BufferEvent,
1383 cx: &mut Context<Self>,
1384 ) {
1385 let BufferEvent::Operation { operation, .. } = event else {
1386 return;
1387 };
1388 let Some(BufferBranchState {
1389 merged_operations, ..
1390 }) = &mut self.branch_state
1391 else {
1392 return;
1393 };
1394
1395 let mut operation_to_undo = None;
1396 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1397 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1398 {
1399 merged_operations.remove(ix);
1400 operation_to_undo = Some(operation.timestamp);
1401 }
1402
1403 self.apply_ops([operation.clone()], cx);
1404
1405 if let Some(timestamp) = operation_to_undo {
1406 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1407 self.undo_operations(counts, cx);
1408 }
1409 }
1410
1411 #[cfg(test)]
1412 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1413 &self.text
1414 }
1415
1416 /// Retrieve a snapshot of the buffer's raw text, without any
1417 /// language-related state like the syntax tree or diagnostics.
1418 #[ztracing::instrument(skip_all)]
1419 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1420 self.text.snapshot()
1421 }
1422
1423 /// The file associated with the buffer, if any.
1424 pub fn file(&self) -> Option<&Arc<dyn File>> {
1425 self.file.as_ref()
1426 }
1427
1428 /// The version of the buffer that was last saved or reloaded from disk.
1429 pub fn saved_version(&self) -> &clock::Global {
1430 &self.saved_version
1431 }
1432
1433 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1434 pub fn saved_mtime(&self) -> Option<MTime> {
1435 self.saved_mtime
1436 }
1437
1438 /// Returns the character encoding of the buffer's file.
1439 pub fn encoding(&self) -> &'static Encoding {
1440 self.encoding
1441 }
1442
1443 /// Sets the character encoding of the buffer.
1444 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1445 self.encoding = encoding;
1446 }
1447
1448 /// Returns whether the buffer has a Byte Order Mark.
1449 pub fn has_bom(&self) -> bool {
1450 self.has_bom
1451 }
1452
1453 /// Sets whether the buffer has a Byte Order Mark.
1454 pub fn set_has_bom(&mut self, has_bom: bool) {
1455 self.has_bom = has_bom;
1456 }
1457
1458 /// Assign a language to the buffer.
1459 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1460 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1461 }
1462
1463 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1464 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1465 self.set_language_(language, true, cx);
1466 }
1467
1468 #[ztracing::instrument(skip_all)]
1469 fn set_language_(
1470 &mut self,
1471 language: Option<Arc<Language>>,
1472 may_block: bool,
1473 cx: &mut Context<Self>,
1474 ) {
1475 self.non_text_state_update_count += 1;
1476 self.syntax_map.lock().clear(&self.text);
1477 let old_language = std::mem::replace(&mut self.language, language);
1478 self.was_changed();
1479 self.reparse(cx, may_block);
1480 let has_fresh_language =
1481 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1482 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1483 }
1484
1485 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1486 /// other languages if parts of the buffer are written in different languages.
1487 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1488 self.syntax_map
1489 .lock()
1490 .set_language_registry(language_registry);
1491 }
1492
1493 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1494 self.syntax_map.lock().language_registry()
1495 }
1496
1497 /// Assign the line ending type to the buffer.
1498 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1499 self.text.set_line_ending(line_ending);
1500
1501 let lamport_timestamp = self.text.lamport_clock.tick();
1502 self.send_operation(
1503 Operation::UpdateLineEnding {
1504 line_ending,
1505 lamport_timestamp,
1506 },
1507 true,
1508 cx,
1509 );
1510 }
1511
1512 /// Assign the buffer a new [`Capability`].
1513 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1514 if self.capability != capability {
1515 self.capability = capability;
1516 cx.emit(BufferEvent::CapabilityChanged)
1517 }
1518 }
1519
1520 /// This method is called to signal that the buffer has been saved.
1521 pub fn did_save(
1522 &mut self,
1523 version: clock::Global,
1524 mtime: Option<MTime>,
1525 cx: &mut Context<Self>,
1526 ) {
1527 self.saved_version = version.clone();
1528 self.has_unsaved_edits.set((version, false));
1529 self.has_conflict = false;
1530 self.saved_mtime = mtime;
1531 self.was_changed();
1532 cx.emit(BufferEvent::Saved);
1533 cx.notify();
1534 }
1535
1536 /// Reloads the contents of the buffer from disk.
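    ///
    /// A rough sketch (not run as a doctest; `buffer` and `cx` are placeholders):
    /// awaiting the returned receiver yields the reload transaction, if one was applied.
    /// ```ignore
    /// let rx = buffer.update(cx, |buffer, cx| buffer.reload(cx));
    /// let _transaction: Option<Transaction> = rx.await.ok().flatten();
    /// ```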
1537 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1538 let (tx, rx) = futures::channel::oneshot::channel();
1539 let prev_version = self.text.version();
1540 self.reload_task = Some(cx.spawn(async move |this, cx| {
1541 let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
1542 let file = this.file.as_ref()?.as_local()?;
1543 Some((
1544 file.disk_state().mtime(),
1545 file.load_bytes(cx),
1546 this.encoding,
1547 ))
1548 })?
1549 else {
1550 return Ok(());
1551 };
1552
1553 let bytes = load_bytes_task.await?;
1554 let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
1555 let new_text = cow.into_owned();
1556
1557 let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1558 this.update(cx, |this, cx| {
1559 if this.version() == diff.base_version {
1560 this.finalize_last_transaction();
1561 this.apply_diff(diff, cx);
1562 tx.send(this.finalize_last_transaction().cloned()).ok();
1563 this.has_conflict = false;
1564 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1565 } else {
1566 if !diff.edits.is_empty()
1567 || this
1568 .edits_since::<usize>(&diff.base_version)
1569 .next()
1570 .is_some()
1571 {
1572 this.has_conflict = true;
1573 }
1574
1575 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1576 }
1577
1578 this.reload_task.take();
1579 })
1580 }));
1581 rx
1582 }
1583
1584 /// This method is called to signal that the buffer has been reloaded.
1585 pub fn did_reload(
1586 &mut self,
1587 version: clock::Global,
1588 line_ending: LineEnding,
1589 mtime: Option<MTime>,
1590 cx: &mut Context<Self>,
1591 ) {
1592 self.saved_version = version;
1593 self.has_unsaved_edits
1594 .set((self.saved_version.clone(), false));
1595 self.text.set_line_ending(line_ending);
1596 self.saved_mtime = mtime;
1597 cx.emit(BufferEvent::Reloaded);
1598 cx.notify();
1599 }
1600
1601 /// Updates the [`File`] backing this buffer. This should be called when
1602 /// the file has changed or has been deleted.
1603 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1604 let was_dirty = self.is_dirty();
1605 let mut file_changed = false;
1606
1607 if let Some(old_file) = self.file.as_ref() {
1608 if new_file.path() != old_file.path() {
1609 file_changed = true;
1610 }
1611
1612 let old_state = old_file.disk_state();
1613 let new_state = new_file.disk_state();
1614 if old_state != new_state {
1615 file_changed = true;
1616 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1617 cx.emit(BufferEvent::ReloadNeeded)
1618 }
1619 }
1620 } else {
1621 file_changed = true;
1622 };
1623
1624 self.file = Some(new_file);
1625 if file_changed {
1626 self.was_changed();
1627 self.non_text_state_update_count += 1;
1628 if was_dirty != self.is_dirty() {
1629 cx.emit(BufferEvent::DirtyChanged);
1630 }
1631 cx.emit(BufferEvent::FileHandleChanged);
1632 cx.notify();
1633 }
1634 }
1635
1636 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1637 Some(self.branch_state.as_ref()?.base_buffer.clone())
1638 }
1639
1640 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1641 pub fn language(&self) -> Option<&Arc<Language>> {
1642 self.language.as_ref()
1643 }
1644
1645 /// Returns the [`Language`] at the given location.
1646 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1647 let offset = position.to_offset(self);
1648 let mut is_first = true;
1649 let start_anchor = self.anchor_before(offset);
1650 let end_anchor = self.anchor_after(offset);
1651 self.syntax_map
1652 .lock()
1653 .layers_for_range(offset..offset, &self.text, false)
1654 .filter(|layer| {
1655 if is_first {
1656 is_first = false;
1657 return true;
1658 }
1659
1660 layer
1661 .included_sub_ranges
1662 .map(|sub_ranges| {
1663 sub_ranges.iter().any(|sub_range| {
1664 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1665 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1666 !is_before_start && !is_after_end
1667 })
1668 })
1669 .unwrap_or(true)
1670 })
1671 .last()
1672 .map(|info| info.language.clone())
1673 .or_else(|| self.language.clone())
1674 }
1675
1676 /// Returns each [`Language`] for the active syntax layers at the given location.
1677 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1678 let offset = position.to_offset(self);
1679 let mut languages: Vec<Arc<Language>> = self
1680 .syntax_map
1681 .lock()
1682 .layers_for_range(offset..offset, &self.text, false)
1683 .map(|info| info.language.clone())
1684 .collect();
1685
1686 if languages.is_empty()
1687 && let Some(buffer_language) = self.language()
1688 {
1689 languages.push(buffer_language.clone());
1690 }
1691
1692 languages
1693 }
1694
1695 /// An integer version number that accounts for all updates besides
1696 /// the buffer's text itself (which is versioned via a version vector).
1697 pub fn non_text_state_update_count(&self) -> usize {
1698 self.non_text_state_update_count
1699 }
1700
1701 /// Whether the buffer is being parsed in the background.
1702 #[cfg(any(test, feature = "test-support"))]
1703 pub fn is_parsing(&self) -> bool {
1704 self.reparse.is_some()
1705 }
1706
1707 /// Indicates whether the buffer contains any regions that may be
1708 /// written in a language that hasn't been loaded yet.
1709 pub fn contains_unknown_injections(&self) -> bool {
1710 self.syntax_map.lock().contains_unknown_injections()
1711 }
1712
1713 #[cfg(any(test, feature = "test-support"))]
1714 pub fn set_sync_parse_timeout(&mut self, timeout: Option<Duration>) {
1715 self.sync_parse_timeout = timeout;
1716 }
1717
1718 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1719 match Arc::get_mut(&mut self.tree_sitter_data) {
1720 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1721 None => {
1722 let tree_sitter_data = TreeSitterData::new(snapshot);
1723 self.tree_sitter_data = Arc::new(tree_sitter_data)
1724 }
1725 }
1726 }
1727
1728 /// Called after an edit to synchronize the buffer's main parse tree with
1729 /// the buffer's new underlying state.
1730 ///
1731 /// Locks the syntax map and interpolates the edits since the last reparse
1732 /// into the foreground syntax tree.
1733 ///
1734 /// Then takes a stable snapshot of the syntax map before unlocking it.
1735 /// The snapshot with the interpolated edits is sent to a background thread,
1736 /// where we ask Tree-sitter to perform an incremental parse.
1737 ///
    /// Meanwhile, in the foreground, if `may_block` is true, we block the main
    /// thread for up to 1ms waiting for the parse to complete. If it finishes
    /// within that timeout, we proceed synchronously.
    ///
    /// If we time out waiting for the parse, we spawn a second task that waits
    /// for the background parse to complete, and we return with the interpolated
    /// tree still in the foreground. When the background parse finishes, we call
    /// back into the main thread and assign the newly parsed state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse recursively. To avoid concurrent parses
    /// of the same buffer, we only initiate a new parse if we are not already
    /// parsing in the background.
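    ///
    /// A minimal usage sketch (not compiled as a doctest), assuming
    /// `buffer: &mut Buffer` and `cx: &mut Context<Buffer>` are already in scope:
    ///
    /// ```ignore
    /// // Kick off a reparse, allowing a short synchronous wait on the parse.
    /// buffer.reparse(cx, true);
    /// // From an async context, `buffer.parsing_idle().await` resolves once
    /// // the background parse has finished and `ParseStatus::Idle` is reached.
    /// ```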
1751 #[ztracing::instrument(skip_all)]
1752 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1753 if self.text.version() != *self.tree_sitter_data.version() {
1754 self.invalidate_tree_sitter_data(self.text.snapshot());
1755 }
1756 if self.reparse.is_some() {
1757 return;
1758 }
1759 let language = if let Some(language) = self.language.clone() {
1760 language
1761 } else {
1762 return;
1763 };
1764
1765 let text = self.text_snapshot();
1766 let parsed_version = self.version();
1767
1768 let mut syntax_map = self.syntax_map.lock();
1769 syntax_map.interpolate(&text);
1770 let language_registry = syntax_map.language_registry();
1771 let mut syntax_snapshot = syntax_map.snapshot();
1772 drop(syntax_map);
1773
1774 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1775 if may_block && let Some(sync_parse_timeout) = self.sync_parse_timeout {
1776 if let Ok(()) = syntax_snapshot.reparse_with_timeout(
1777 &text,
1778 language_registry.clone(),
1779 language.clone(),
1780 sync_parse_timeout,
1781 ) {
1782 self.did_finish_parsing(syntax_snapshot, Duration::from_millis(300), cx);
1783 self.reparse = None;
1784 return;
1785 }
1786 }
1787
1788 let parse_task = cx.background_spawn({
1789 let language = language.clone();
1790 let language_registry = language_registry.clone();
1791 async move {
1792 syntax_snapshot.reparse(&text, language_registry, language);
1793 syntax_snapshot
1794 }
1795 });
1796
1797 self.reparse = Some(cx.spawn(async move |this, cx| {
1798 let new_syntax_map = parse_task.await;
1799 this.update(cx, move |this, cx| {
1800 let grammar_changed = || {
1801 this.language
1802 .as_ref()
1803 .is_none_or(|current_language| !Arc::ptr_eq(&language, current_language))
1804 };
1805 let language_registry_changed = || {
1806 new_syntax_map.contains_unknown_injections()
1807 && language_registry.is_some_and(|registry| {
1808 registry.version() != new_syntax_map.language_registry_version()
1809 })
1810 };
1811 let parse_again = this.version.changed_since(&parsed_version)
1812 || language_registry_changed()
1813 || grammar_changed();
1814 this.did_finish_parsing(new_syntax_map, Duration::ZERO, cx);
1815 this.reparse = None;
1816 if parse_again {
1817 this.reparse(cx, false);
1818 }
1819 })
1820 .ok();
1821 }));
1822 }
1823
1824 fn did_finish_parsing(
1825 &mut self,
1826 syntax_snapshot: SyntaxSnapshot,
1827 block_budget: Duration,
1828 cx: &mut Context<Self>,
1829 ) {
1830 self.non_text_state_update_count += 1;
1831 self.syntax_map.lock().did_parse(syntax_snapshot);
1832 self.was_changed();
1833 self.request_autoindent(cx, block_budget);
1834 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1835 self.invalidate_tree_sitter_data(self.text.snapshot());
1836 cx.emit(BufferEvent::Reparsed);
1837 cx.notify();
1838 }
1839
1840 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1841 self.parse_status.1.clone()
1842 }
1843
    /// Waits until the buffer is no longer parsing.
1845 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1846 let mut parse_status = self.parse_status();
1847 async move {
1848 while *parse_status.borrow() != ParseStatus::Idle {
1849 if parse_status.changed().await.is_err() {
1850 break;
1851 }
1852 }
1853 }
1854 }
1855
1856 /// Assign to the buffer a set of diagnostics created by a given language server.
1857 pub fn update_diagnostics(
1858 &mut self,
1859 server_id: LanguageServerId,
1860 diagnostics: DiagnosticSet,
1861 cx: &mut Context<Self>,
1862 ) {
1863 let lamport_timestamp = self.text.lamport_clock.tick();
1864 let op = Operation::UpdateDiagnostics {
1865 server_id,
1866 diagnostics: diagnostics.iter().cloned().collect(),
1867 lamport_timestamp,
1868 };
1869
1870 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1871 self.send_operation(op, true, cx);
1872 }
1873
1874 pub fn buffer_diagnostics(
1875 &self,
1876 for_server: Option<LanguageServerId>,
1877 ) -> Vec<&DiagnosticEntry<Anchor>> {
1878 match for_server {
1879 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1880 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1881 Err(_) => Vec::new(),
1882 },
1883 None => self
1884 .diagnostics
1885 .iter()
1886 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1887 .collect(),
1888 }
1889 }
1890
1891 fn request_autoindent(&mut self, cx: &mut Context<Self>, block_budget: Duration) {
1892 if let Some(indent_sizes) = self.compute_autoindents() {
1893 let indent_sizes = cx.background_spawn(indent_sizes);
1894 match cx
1895 .foreground_executor()
1896 .block_with_timeout(block_budget, indent_sizes)
1897 {
1898 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1899 Err(indent_sizes) => {
1900 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1901 let indent_sizes = indent_sizes.await;
1902 this.update(cx, |this, cx| {
1903 this.apply_autoindents(indent_sizes, cx);
1904 })
1905 .ok();
1906 }));
1907 }
1908 }
1909 } else {
1910 self.autoindent_requests.clear();
1911 for tx in self.wait_for_autoindent_txs.drain(..) {
1912 tx.send(()).ok();
1913 }
1914 }
1915 }
1916
1917 fn compute_autoindents(
1918 &self,
1919 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1920 let max_rows_between_yields = 100;
1921 let snapshot = self.snapshot();
1922 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1923 return None;
1924 }
1925
1926 let autoindent_requests = self.autoindent_requests.clone();
1927 Some(async move {
1928 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1929 for request in autoindent_requests {
1930 // Resolve each edited range to its row in the current buffer and in the
1931 // buffer before this batch of edits.
1932 let mut row_ranges = Vec::new();
1933 let mut old_to_new_rows = BTreeMap::new();
1934 let mut language_indent_sizes_by_new_row = Vec::new();
1935 for entry in &request.entries {
1936 let position = entry.range.start;
1937 let new_row = position.to_point(&snapshot).row;
1938 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1939 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1940
1941 if let Some(old_row) = entry.old_row {
1942 old_to_new_rows.insert(old_row, new_row);
1943 }
1944 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1945 }
1946
1947 // Build a map containing the suggested indentation for each of the edited lines
1948 // with respect to the state of the buffer before these edits. This map is keyed
1949 // by the rows for these lines in the current state of the buffer.
1950 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1951 let old_edited_ranges =
1952 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1953 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1954 let mut language_indent_size = IndentSize::default();
1955 for old_edited_range in old_edited_ranges {
1956 let suggestions = request
1957 .before_edit
1958 .suggest_autoindents(old_edited_range.clone())
1959 .into_iter()
1960 .flatten();
1961 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1962 if let Some(suggestion) = suggestion {
1963 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1964
1965 // Find the indent size based on the language for this row.
1966 while let Some((row, size)) = language_indent_sizes.peek() {
1967 if *row > new_row {
1968 break;
1969 }
1970 language_indent_size = *size;
1971 language_indent_sizes.next();
1972 }
1973
1974 let suggested_indent = old_to_new_rows
1975 .get(&suggestion.basis_row)
1976 .and_then(|from_row| {
1977 Some(old_suggestions.get(from_row).copied()?.0)
1978 })
1979 .unwrap_or_else(|| {
1980 request
1981 .before_edit
1982 .indent_size_for_line(suggestion.basis_row)
1983 })
1984 .with_delta(suggestion.delta, language_indent_size);
1985 old_suggestions
1986 .insert(new_row, (suggested_indent, suggestion.within_error));
1987 }
1988 }
1989 yield_now().await;
1990 }
1991
1992 // Compute new suggestions for each line, but only include them in the result
1993 // if they differ from the old suggestion for that line.
1994 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1995 let mut language_indent_size = IndentSize::default();
1996 for (row_range, original_indent_column) in row_ranges {
1997 let new_edited_row_range = if request.is_block_mode {
1998 row_range.start..row_range.start + 1
1999 } else {
2000 row_range.clone()
2001 };
2002
2003 let suggestions = snapshot
2004 .suggest_autoindents(new_edited_row_range.clone())
2005 .into_iter()
2006 .flatten();
2007 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
2008 if let Some(suggestion) = suggestion {
2009 // Find the indent size based on the language for this row.
2010 while let Some((row, size)) = language_indent_sizes.peek() {
2011 if *row > new_row {
2012 break;
2013 }
2014 language_indent_size = *size;
2015 language_indent_sizes.next();
2016 }
2017
2018 let suggested_indent = indent_sizes
2019 .get(&suggestion.basis_row)
2020 .copied()
2021 .map(|e| e.0)
2022 .unwrap_or_else(|| {
2023 snapshot.indent_size_for_line(suggestion.basis_row)
2024 })
2025 .with_delta(suggestion.delta, language_indent_size);
2026
2027 if old_suggestions.get(&new_row).is_none_or(
2028 |(old_indentation, was_within_error)| {
2029 suggested_indent != *old_indentation
2030 && (!suggestion.within_error || *was_within_error)
2031 },
2032 ) {
2033 indent_sizes.insert(
2034 new_row,
2035 (suggested_indent, request.ignore_empty_lines),
2036 );
2037 }
2038 }
2039 }
2040
2041 if let (true, Some(original_indent_column)) =
2042 (request.is_block_mode, original_indent_column)
2043 {
2044 let new_indent =
2045 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2046 *indent
2047 } else {
2048 snapshot.indent_size_for_line(row_range.start)
2049 };
2050 let delta = new_indent.len as i64 - original_indent_column as i64;
2051 if delta != 0 {
2052 for row in row_range.skip(1) {
2053 indent_sizes.entry(row).or_insert_with(|| {
2054 let mut size = snapshot.indent_size_for_line(row);
2055 if size.kind == new_indent.kind {
2056 match delta.cmp(&0) {
2057 Ordering::Greater => size.len += delta as u32,
2058 Ordering::Less => {
2059 size.len = size.len.saturating_sub(-delta as u32)
2060 }
2061 Ordering::Equal => {}
2062 }
2063 }
2064 (size, request.ignore_empty_lines)
2065 });
2066 }
2067 }
2068 }
2069
2070 yield_now().await;
2071 }
2072 }
2073
2074 indent_sizes
2075 .into_iter()
2076 .filter_map(|(row, (indent, ignore_empty_lines))| {
2077 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2078 None
2079 } else {
2080 Some((row, indent))
2081 }
2082 })
2083 .collect()
2084 })
2085 }
2086
2087 fn apply_autoindents(
2088 &mut self,
2089 indent_sizes: BTreeMap<u32, IndentSize>,
2090 cx: &mut Context<Self>,
2091 ) {
2092 self.autoindent_requests.clear();
2093 for tx in self.wait_for_autoindent_txs.drain(..) {
2094 tx.send(()).ok();
2095 }
2096
2097 let edits: Vec<_> = indent_sizes
2098 .into_iter()
2099 .filter_map(|(row, indent_size)| {
2100 let current_size = indent_size_for_line(self, row);
2101 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2102 })
2103 .collect();
2104
2105 let preserve_preview = self.preserve_preview();
2106 self.edit(edits, None, cx);
2107 if preserve_preview {
2108 self.refresh_preview();
2109 }
2110 }
2111
2112 /// Create a minimal edit that will cause the given row to be indented
2113 /// with the given size. After applying this edit, the length of the line
2114 /// will always be at least `new_size.len`.
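    ///
    /// A small illustrative sketch (not compiled as a doctest); the row and
    /// sizes below are arbitrary:
    ///
    /// ```ignore
    /// // Growing a 2-space indent to 6 spaces inserts 4 spaces at column 0.
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     2,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(6),
    /// );
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(2, 0)..Point::new(2, 0), "    ".to_string()))
    /// );
    /// ```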
2115 pub fn edit_for_indent_size_adjustment(
2116 row: u32,
2117 current_size: IndentSize,
2118 new_size: IndentSize,
2119 ) -> Option<(Range<Point>, String)> {
2120 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2122 Ordering::Greater => {
2123 let point = Point::new(row, 0);
2124 Some((
2125 point..point,
2126 iter::repeat(new_size.char())
2127 .take((new_size.len - current_size.len) as usize)
2128 .collect::<String>(),
2129 ))
2130 }
2131
2132 Ordering::Less => Some((
2133 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2134 String::new(),
2135 )),
2136
2137 Ordering::Equal => None,
2138 }
2139 } else {
2140 Some((
2141 Point::new(row, 0)..Point::new(row, current_size.len),
2142 iter::repeat(new_size.char())
2143 .take(new_size.len as usize)
2144 .collect::<String>(),
2145 ))
2146 }
2147 }
2148
2149 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2150 /// and the given new text.
2151 pub fn diff<T>(&self, new_text: T, cx: &App) -> Task<Diff>
2152 where
2153 T: AsRef<str> + Send + 'static,
2154 {
2155 let old_text = self.as_rope().clone();
2156 let base_version = self.version();
2157 cx.background_spawn(async move {
2158 let old_text = old_text.to_string();
2159 let mut new_text = new_text.as_ref().to_owned();
2160 let line_ending = LineEnding::detect(&new_text);
2161 LineEnding::normalize(&mut new_text);
2162 let edits = text_diff(&old_text, &new_text);
2163 Diff {
2164 base_version,
2165 line_ending,
2166 edits,
2167 }
2168 })
2169 }
2170
2171 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines, and returns a `Diff` that removes that whitespace.
2173 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2174 let old_text = self.as_rope().clone();
2175 let line_ending = self.line_ending();
2176 let base_version = self.version();
2177 cx.background_spawn(async move {
2178 let ranges = trailing_whitespace_ranges(&old_text);
2179 let empty = Arc::<str>::from("");
2180 Diff {
2181 base_version,
2182 line_ending,
2183 edits: ranges
2184 .into_iter()
2185 .map(|range| (range, empty.clone()))
2186 .collect(),
2187 }
2188 })
2189 }
2190
2191 /// Ensures that the buffer ends with a single newline character, and
    /// no other whitespace. Does nothing if the buffer is empty.
2193 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2194 let len = self.len();
2195 if len == 0 {
2196 return;
2197 }
2198 let mut offset = len;
2199 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2200 let non_whitespace_len = chunk
2201 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2202 .len();
2203 offset -= chunk.len();
2204 offset += non_whitespace_len;
2205 if non_whitespace_len != 0 {
2206 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2207 return;
2208 }
2209 break;
2210 }
2211 }
2212 self.edit([(offset..len, "\n")], None, cx);
2213 }
2214
2215 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2216 /// calculated, then adjust the diff to account for those changes, and discard any
2217 /// parts of the diff that conflict with those changes.
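    ///
    /// A minimal sketch of pairing this with [`Buffer::diff`], written as a
    /// hypothetical helper method on `Buffer` (not compiled as a doctest):
    ///
    /// ```ignore
    /// fn replace_text(&mut self, new_text: String, cx: &mut Context<Buffer>) {
    ///     // Compute the diff against the current text on a background thread.
    ///     let diff = self.diff(new_text, cx);
    ///     cx.spawn(async move |this, cx| {
    ///         let diff = diff.await;
    ///         // Apply it on the main thread; hunks invalidated by concurrent
    ///         // edits are discarded.
    ///         this.update(cx, |this, cx| this.apply_diff(diff, cx)).ok();
    ///     })
    ///     .detach();
    /// }
    /// ```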
2218 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2219 let snapshot = self.snapshot();
2220 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2221 let mut delta = 0;
2222 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2223 while let Some(edit_since) = edits_since.peek() {
2224 // If the edit occurs after a diff hunk, then it does not
2225 // affect that hunk.
2226 if edit_since.old.start > range.end {
2227 break;
2228 }
2229 // If the edit precedes the diff hunk, then adjust the hunk
2230 // to reflect the edit.
2231 else if edit_since.old.end < range.start {
2232 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2233 edits_since.next();
2234 }
2235 // If the edit intersects a diff hunk, then discard that hunk.
2236 else {
2237 return None;
2238 }
2239 }
2240
2241 let start = (range.start as i64 + delta) as usize;
2242 let end = (range.end as i64 + delta) as usize;
2243 Some((start..end, new_text))
2244 });
2245
2246 self.start_transaction();
2247 self.text.set_line_ending(diff.line_ending);
2248 self.edit(adjusted_edits, None, cx);
2249 self.end_transaction(cx)
2250 }
2251
2252 pub fn has_unsaved_edits(&self) -> bool {
2253 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2254
2255 if last_version == self.version {
2256 self.has_unsaved_edits
2257 .set((last_version, has_unsaved_edits));
2258 return has_unsaved_edits;
2259 }
2260
2261 let has_edits = self.has_edits_since(&self.saved_version);
2262 self.has_unsaved_edits
2263 .set((self.version.clone(), has_edits));
2264 has_edits
2265 }
2266
2267 /// Checks if the buffer has unsaved changes.
2268 pub fn is_dirty(&self) -> bool {
2269 if self.capability == Capability::ReadOnly {
2270 return false;
2271 }
2272 if self.has_conflict {
2273 return true;
2274 }
2275 match self.file.as_ref().map(|f| f.disk_state()) {
2276 Some(DiskState::New) | Some(DiskState::Deleted) => {
2277 !self.is_empty() && self.has_unsaved_edits()
2278 }
2279 _ => self.has_unsaved_edits(),
2280 }
2281 }
2282
2283 /// Marks the buffer as having a conflict regardless of current buffer state.
2284 pub fn set_conflict(&mut self) {
2285 self.has_conflict = true;
2286 }
2287
2288 /// Checks if the buffer and its file have both changed since the buffer
2289 /// was last saved or reloaded.
2290 pub fn has_conflict(&self) -> bool {
2291 if self.has_conflict {
2292 return true;
2293 }
2294 let Some(file) = self.file.as_ref() else {
2295 return false;
2296 };
2297 match file.disk_state() {
2298 DiskState::New => false,
2299 DiskState::Present { mtime } => match self.saved_mtime {
2300 Some(saved_mtime) => {
2301 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2302 }
2303 None => true,
2304 },
2305 DiskState::Deleted => false,
2306 DiskState::Historic { .. } => false,
2307 }
2308 }
2309
2310 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2311 pub fn subscribe(&mut self) -> Subscription<usize> {
2312 self.text.subscribe()
2313 }
2314
2315 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2316 ///
2317 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
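    ///
    /// A minimal sketch (not compiled as a doctest); the listener keeps the
    /// strong `Rc` and hands the buffer a weak reference:
    ///
    /// ```ignore
    /// use std::{cell::Cell, rc::Rc};
    ///
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ... after some edits have been applied ...
    /// if changed.replace(false) {
    ///     // The buffer's text changed since the bit was last cleared.
    /// }
    /// ```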
2319 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2320 if let Err(ix) = self
2321 .change_bits
2322 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2323 {
2324 self.change_bits.insert(ix, bit);
2325 }
2326 }
2327
2328 /// Set the change bit for all "listeners".
2329 fn was_changed(&mut self) {
2330 self.change_bits.retain(|change_bit| {
2331 change_bit
2332 .upgrade()
2333 .inspect(|bit| {
2334 _ = bit.replace(true);
2335 })
2336 .is_some()
2337 });
2338 }
2339
2340 /// Starts a transaction, if one is not already in-progress. When undoing or
2341 /// redoing edits, all of the edits performed within a transaction are undone
2342 /// or redone together.
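    ///
    /// A minimal sketch (not compiled as a doctest) grouping two edits into a
    /// single undoable transaction, assuming `buffer` and `cx` are in scope:
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "// header\n")], None, cx);
    /// buffer.edit([(buffer.len()..buffer.len(), "\n")], None, cx);
    /// let transaction_id = buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// ```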
2343 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2344 self.start_transaction_at(Instant::now())
2345 }
2346
2347 /// Starts a transaction, providing the current time. Subsequent transactions
2348 /// that occur within a short period of time will be grouped together. This
2349 /// is controlled by the buffer's undo grouping duration.
2350 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2351 self.transaction_depth += 1;
2352 if self.was_dirty_before_starting_transaction.is_none() {
2353 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2354 }
2355 self.text.start_transaction_at(now)
2356 }
2357
2358 /// Terminates the current transaction, if this is the outermost transaction.
2359 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2360 self.end_transaction_at(Instant::now(), cx)
2361 }
2362
2363 /// Terminates the current transaction, providing the current time. Subsequent transactions
2364 /// that occur within a short period of time will be grouped together. This
2365 /// is controlled by the buffer's undo grouping duration.
2366 pub fn end_transaction_at(
2367 &mut self,
2368 now: Instant,
2369 cx: &mut Context<Self>,
2370 ) -> Option<TransactionId> {
2371 assert!(self.transaction_depth > 0);
2372 self.transaction_depth -= 1;
2373 let was_dirty = if self.transaction_depth == 0 {
2374 self.was_dirty_before_starting_transaction.take().unwrap()
2375 } else {
2376 false
2377 };
2378 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2379 self.did_edit(&start_version, was_dirty, cx);
2380 Some(transaction_id)
2381 } else {
2382 None
2383 }
2384 }
2385
2386 /// Manually add a transaction to the buffer's undo history.
2387 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2388 self.text.push_transaction(transaction, now);
2389 }
2390
2391 /// Differs from `push_transaction` in that it does not clear the redo
2392 /// stack. Intended to be used to create a parent transaction to merge
2393 /// potential child transactions into.
2394 ///
2395 /// The caller is responsible for removing it from the undo history using
2396 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2397 /// are merged into this transaction, the caller is responsible for ensuring
    /// the redo stack is cleared. The easiest way to ensure that is to create
    /// transactions with the usual `start_transaction` and `end_transaction`
    /// methods and merge the resulting transactions into the transaction
    /// created by this method.
2402 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2403 self.text.push_empty_transaction(now)
2404 }
2405
2406 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2408 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2409 self.text.finalize_last_transaction()
2410 }
2411
2412 /// Manually group all changes since a given transaction.
2413 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2414 self.text.group_until_transaction(transaction_id);
2415 }
2416
2417 /// Manually remove a transaction from the buffer's undo history
2418 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2419 self.text.forget_transaction(transaction_id)
2420 }
2421
2422 /// Retrieve a transaction from the buffer's undo history
2423 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2424 self.text.get_transaction(transaction_id)
2425 }
2426
2427 /// Manually merge two transactions in the buffer's undo history.
2428 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2429 self.text.merge_transactions(transaction, destination);
2430 }
2431
2432 /// Waits for the buffer to receive operations with the given timestamps.
2433 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2434 &mut self,
2435 edit_ids: It,
2436 ) -> impl Future<Output = Result<()>> + use<It> {
2437 self.text.wait_for_edits(edit_ids)
2438 }
2439
2440 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2441 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2442 &mut self,
2443 anchors: It,
2444 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2445 self.text.wait_for_anchors(anchors)
2446 }
2447
2448 /// Waits for the buffer to receive operations up to the given version.
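    ///
    /// A minimal sketch (not compiled as a doctest), assuming a
    /// `version: clock::Global` received from a collaborator:
    ///
    /// ```ignore
    /// let wait = buffer.wait_for_version(version);
    /// // From an async context: `wait.await?` resolves once every operation up
    /// // to `version` has been applied (or errors if waiting is abandoned).
    /// ```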
2449 pub fn wait_for_version(
2450 &mut self,
2451 version: clock::Global,
2452 ) -> impl Future<Output = Result<()>> + use<> {
2453 self.text.wait_for_version(version)
2454 }
2455
    /// Forces all futures returned by [`Buffer::wait_for_edits`], [`Buffer::wait_for_anchors`],
    /// or [`Buffer::wait_for_version`] to resolve with an error.
2458 pub fn give_up_waiting(&mut self) {
2459 self.text.give_up_waiting();
2460 }
2461
2462 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2463 let mut rx = None;
2464 if !self.autoindent_requests.is_empty() {
2465 let channel = oneshot::channel();
2466 self.wait_for_autoindent_txs.push(channel.0);
2467 rx = Some(channel.1);
2468 }
2469 rx
2470 }
2471
2472 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2473 pub fn set_active_selections(
2474 &mut self,
2475 selections: Arc<[Selection<Anchor>]>,
2476 line_mode: bool,
2477 cursor_shape: CursorShape,
2478 cx: &mut Context<Self>,
2479 ) {
2480 let lamport_timestamp = self.text.lamport_clock.tick();
2481 self.remote_selections.insert(
2482 self.text.replica_id(),
2483 SelectionSet {
2484 selections: selections.clone(),
2485 lamport_timestamp,
2486 line_mode,
2487 cursor_shape,
2488 },
2489 );
2490 self.send_operation(
2491 Operation::UpdateSelections {
2492 selections,
2493 line_mode,
2494 lamport_timestamp,
2495 cursor_shape,
2496 },
2497 true,
2498 cx,
2499 );
2500 self.non_text_state_update_count += 1;
2501 cx.notify();
2502 }
2503
2504 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2505 /// this replica.
2506 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2507 if self
2508 .remote_selections
2509 .get(&self.text.replica_id())
2510 .is_none_or(|set| !set.selections.is_empty())
2511 {
2512 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2513 }
2514 }
2515
2516 pub fn set_agent_selections(
2517 &mut self,
2518 selections: Arc<[Selection<Anchor>]>,
2519 line_mode: bool,
2520 cursor_shape: CursorShape,
2521 cx: &mut Context<Self>,
2522 ) {
2523 let lamport_timestamp = self.text.lamport_clock.tick();
2524 self.remote_selections.insert(
2525 ReplicaId::AGENT,
2526 SelectionSet {
2527 selections,
2528 lamport_timestamp,
2529 line_mode,
2530 cursor_shape,
2531 },
2532 );
2533 self.non_text_state_update_count += 1;
2534 cx.notify();
2535 }
2536
2537 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2538 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2539 }
2540
2541 /// Replaces the buffer's entire text.
2542 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2543 where
2544 T: Into<Arc<str>>,
2545 {
2546 self.autoindent_requests.clear();
2547 self.edit([(0..self.len(), text)], None, cx)
2548 }
2549
2550 /// Appends the given text to the end of the buffer.
2551 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2552 where
2553 T: Into<Arc<str>>,
2554 {
2555 self.edit([(self.len()..self.len(), text)], None, cx)
2556 }
2557
2558 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2559 /// delete, and a string of text to insert at that location. Adjacent edits are coalesced.
2560 ///
2561 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2562 /// request for the edited ranges, which will be processed when the buffer finishes
2563 /// parsing.
2564 ///
    /// Parsing takes place at the end of a transaction, and may happen synchronously
    /// or asynchronously, depending on the changes.
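    ///
    /// A minimal sketch (not compiled as a doctest), assuming `buffer` and `cx`
    /// are in scope:
    ///
    /// ```ignore
    /// // Insert a function at the start of the buffer and let the language's
    /// // indentation rules adjust each inserted line.
    /// buffer.edit(
    ///     [(0..0, "fn example() {\ntodo!()\n}\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```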
2567 pub fn edit<I, S, T>(
2568 &mut self,
2569 edits_iter: I,
2570 autoindent_mode: Option<AutoindentMode>,
2571 cx: &mut Context<Self>,
2572 ) -> Option<clock::Lamport>
2573 where
2574 I: IntoIterator<Item = (Range<S>, T)>,
2575 S: ToOffset,
2576 T: Into<Arc<str>>,
2577 {
2578 self.edit_internal(edits_iter, autoindent_mode, true, cx)
2579 }
2580
2581 /// Like [`edit`](Self::edit), but does not coalesce adjacent edits.
2582 pub fn edit_non_coalesce<I, S, T>(
2583 &mut self,
2584 edits_iter: I,
2585 autoindent_mode: Option<AutoindentMode>,
2586 cx: &mut Context<Self>,
2587 ) -> Option<clock::Lamport>
2588 where
2589 I: IntoIterator<Item = (Range<S>, T)>,
2590 S: ToOffset,
2591 T: Into<Arc<str>>,
2592 {
2593 self.edit_internal(edits_iter, autoindent_mode, false, cx)
2594 }
2595
2596 fn edit_internal<I, S, T>(
2597 &mut self,
2598 edits_iter: I,
2599 autoindent_mode: Option<AutoindentMode>,
2600 coalesce_adjacent: bool,
2601 cx: &mut Context<Self>,
2602 ) -> Option<clock::Lamport>
2603 where
2604 I: IntoIterator<Item = (Range<S>, T)>,
2605 S: ToOffset,
2606 T: Into<Arc<str>>,
2607 {
2608 // Skip invalid edits and coalesce contiguous ones.
2609 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2610
2611 for (range, new_text) in edits_iter {
2612 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2613
2614 if range.start > range.end {
2615 mem::swap(&mut range.start, &mut range.end);
2616 }
2617 let new_text = new_text.into();
2618 if !new_text.is_empty() || !range.is_empty() {
2619 let prev_edit = edits.last_mut();
2620 let should_coalesce = prev_edit.as_ref().is_some_and(|(prev_range, _)| {
2621 if coalesce_adjacent {
2622 prev_range.end >= range.start
2623 } else {
2624 prev_range.end > range.start
2625 }
2626 });
2627
2628 if let Some((prev_range, prev_text)) = prev_edit
2629 && should_coalesce
2630 {
2631 prev_range.end = cmp::max(prev_range.end, range.end);
2632 *prev_text = format!("{prev_text}{new_text}").into();
2633 } else {
2634 edits.push((range, new_text));
2635 }
2636 }
2637 }
2638 if edits.is_empty() {
2639 return None;
2640 }
2641
2642 self.start_transaction();
2643 self.pending_autoindent.take();
2644 let autoindent_request = autoindent_mode
2645 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2646
2647 let edit_operation = self.text.edit(edits.iter().cloned());
2648 let edit_id = edit_operation.timestamp();
2649
2650 if let Some((before_edit, mode)) = autoindent_request {
2651 let mut delta = 0isize;
2652 let mut previous_setting = None;
2653 let entries: Vec<_> = edits
2654 .into_iter()
2655 .enumerate()
2656 .zip(&edit_operation.as_edit().unwrap().new_text)
2657 .filter(|((_, (range, _)), _)| {
2658 let language = before_edit.language_at(range.start);
2659 let language_id = language.map(|l| l.id());
2660 if let Some((cached_language_id, auto_indent)) = previous_setting
2661 && cached_language_id == language_id
2662 {
2663 auto_indent
2664 } else {
2665 // The auto-indent setting is not present in editorconfigs, hence
2666 // we can avoid passing the file here.
2667 let auto_indent =
2668 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2669 previous_setting = Some((language_id, auto_indent));
2670 auto_indent
2671 }
2672 })
2673 .map(|((ix, (range, _)), new_text)| {
2674 let new_text_length = new_text.len();
2675 let old_start = range.start.to_point(&before_edit);
2676 let new_start = (delta + range.start as isize) as usize;
2677 let range_len = range.end - range.start;
2678 delta += new_text_length as isize - range_len as isize;
2679
2680 // Decide what range of the insertion to auto-indent, and whether
2681 // the first line of the insertion should be considered a newly-inserted line
2682 // or an edit to an existing line.
2683 let mut range_of_insertion_to_indent = 0..new_text_length;
2684 let mut first_line_is_new = true;
2685
2686 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2687 let old_line_end = before_edit.line_len(old_start.row);
2688
2689 if old_start.column > old_line_start {
2690 first_line_is_new = false;
2691 }
2692
2693 if !new_text.contains('\n')
2694 && (old_start.column + (range_len as u32) < old_line_end
2695 || old_line_end == old_line_start)
2696 {
2697 first_line_is_new = false;
2698 }
2699
2700 // When inserting text starting with a newline, avoid auto-indenting the
2701 // previous line.
2702 if new_text.starts_with('\n') {
2703 range_of_insertion_to_indent.start += 1;
2704 first_line_is_new = true;
2705 }
2706
2707 let mut original_indent_column = None;
2708 if let AutoindentMode::Block {
2709 original_indent_columns,
2710 } = &mode
2711 {
2712 original_indent_column = Some(if new_text.starts_with('\n') {
2713 indent_size_for_text(
2714 new_text[range_of_insertion_to_indent.clone()].chars(),
2715 )
2716 .len
2717 } else {
2718 original_indent_columns
2719 .get(ix)
2720 .copied()
2721 .flatten()
2722 .unwrap_or_else(|| {
2723 indent_size_for_text(
2724 new_text[range_of_insertion_to_indent.clone()].chars(),
2725 )
2726 .len
2727 })
2728 });
2729
2730 // Avoid auto-indenting the line after the edit.
2731 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2732 range_of_insertion_to_indent.end -= 1;
2733 }
2734 }
2735
2736 AutoindentRequestEntry {
2737 original_indent_column,
2738 old_row: if first_line_is_new {
2739 None
2740 } else {
2741 Some(old_start.row)
2742 },
2743 indent_size: before_edit.language_indent_size_at(range.start, cx),
2744 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2745 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2746 }
2747 })
2748 .collect();
2749
2750 if !entries.is_empty() {
2751 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2752 before_edit,
2753 entries,
2754 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2755 ignore_empty_lines: false,
2756 }));
2757 }
2758 }
2759
2760 self.end_transaction(cx);
2761 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2762 Some(edit_id)
2763 }
2764
2765 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2766 self.was_changed();
2767
2768 if self.edits_since::<usize>(old_version).next().is_none() {
2769 return;
2770 }
2771
2772 self.reparse(cx, true);
2773 cx.emit(BufferEvent::Edited);
2774 if was_dirty != self.is_dirty() {
2775 cx.emit(BufferEvent::DirtyChanged);
2776 }
2777 cx.notify();
2778 }
2779
2780 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2781 where
2782 I: IntoIterator<Item = Range<T>>,
2783 T: ToOffset + Copy,
2784 {
2785 let before_edit = self.snapshot();
2786 let entries = ranges
2787 .into_iter()
2788 .map(|range| AutoindentRequestEntry {
2789 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2790 old_row: None,
2791 indent_size: before_edit.language_indent_size_at(range.start, cx),
2792 original_indent_column: None,
2793 })
2794 .collect();
2795 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2796 before_edit,
2797 entries,
2798 is_block_mode: false,
2799 ignore_empty_lines: true,
2800 }));
2801 self.request_autoindent(cx, Duration::from_micros(300));
2802 }
2803
    /// Inserts newlines at the given position to create an empty line, returning the start of the new line.
    /// You can also request the insertion of empty lines above and below the line starting at the returned point.
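    ///
    /// A minimal sketch (not compiled as a doctest); the position is arbitrary
    /// and `buffer`/`cx` are assumed to be in scope:
    ///
    /// ```ignore
    /// // Open an empty, auto-indented line near row 3, padding it with blank
    /// // lines above and below when its neighbors are not already blank.
    /// let start = buffer.insert_empty_line(Point::new(3, 5), true, true, cx);
    /// ```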
2806 pub fn insert_empty_line(
2807 &mut self,
2808 position: impl ToPoint,
2809 space_above: bool,
2810 space_below: bool,
2811 cx: &mut Context<Self>,
2812 ) -> Point {
2813 let mut position = position.to_point(self);
2814
2815 self.start_transaction();
2816
2817 self.edit(
2818 [(position..position, "\n")],
2819 Some(AutoindentMode::EachLine),
2820 cx,
2821 );
2822
2823 if position.column > 0 {
2824 position += Point::new(1, 0);
2825 }
2826
2827 if !self.is_line_blank(position.row) {
2828 self.edit(
2829 [(position..position, "\n")],
2830 Some(AutoindentMode::EachLine),
2831 cx,
2832 );
2833 }
2834
2835 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2836 self.edit(
2837 [(position..position, "\n")],
2838 Some(AutoindentMode::EachLine),
2839 cx,
2840 );
2841 position.row += 1;
2842 }
2843
2844 if space_below
2845 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2846 {
2847 self.edit(
2848 [(position..position, "\n")],
2849 Some(AutoindentMode::EachLine),
2850 cx,
2851 );
2852 }
2853
2854 self.end_transaction(cx);
2855
2856 position
2857 }
2858
2859 /// Applies the given remote operations to the buffer.
2860 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2861 self.pending_autoindent.take();
2862 let was_dirty = self.is_dirty();
2863 let old_version = self.version.clone();
2864 let mut deferred_ops = Vec::new();
2865 let buffer_ops = ops
2866 .into_iter()
2867 .filter_map(|op| match op {
2868 Operation::Buffer(op) => Some(op),
2869 _ => {
2870 if self.can_apply_op(&op) {
2871 self.apply_op(op, cx);
2872 } else {
2873 deferred_ops.push(op);
2874 }
2875 None
2876 }
2877 })
2878 .collect::<Vec<_>>();
2879 for operation in buffer_ops.iter() {
2880 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2881 }
2882 self.text.apply_ops(buffer_ops);
2883 self.deferred_ops.insert(deferred_ops);
2884 self.flush_deferred_ops(cx);
2885 self.did_edit(&old_version, was_dirty, cx);
2886 // Notify independently of whether the buffer was edited as the operations could include a
2887 // selection update.
2888 cx.notify();
2889 }
2890
2891 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2892 let mut deferred_ops = Vec::new();
2893 for op in self.deferred_ops.drain().iter().cloned() {
2894 if self.can_apply_op(&op) {
2895 self.apply_op(op, cx);
2896 } else {
2897 deferred_ops.push(op);
2898 }
2899 }
2900 self.deferred_ops.insert(deferred_ops);
2901 }
2902
2903 pub fn has_deferred_ops(&self) -> bool {
2904 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2905 }
2906
2907 fn can_apply_op(&self, operation: &Operation) -> bool {
2908 match operation {
2909 Operation::Buffer(_) => {
2910 unreachable!("buffer operations should never be applied at this layer")
2911 }
2912 Operation::UpdateDiagnostics {
2913 diagnostics: diagnostic_set,
2914 ..
2915 } => diagnostic_set.iter().all(|diagnostic| {
2916 self.text.can_resolve(&diagnostic.range.start)
2917 && self.text.can_resolve(&diagnostic.range.end)
2918 }),
2919 Operation::UpdateSelections { selections, .. } => selections
2920 .iter()
2921 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2922 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2923 }
2924 }
2925
2926 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2927 match operation {
2928 Operation::Buffer(_) => {
2929 unreachable!("buffer operations should never be applied at this layer")
2930 }
2931 Operation::UpdateDiagnostics {
2932 server_id,
2933 diagnostics: diagnostic_set,
2934 lamport_timestamp,
2935 } => {
2936 let snapshot = self.snapshot();
2937 self.apply_diagnostic_update(
2938 server_id,
2939 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2940 lamport_timestamp,
2941 cx,
2942 );
2943 }
2944 Operation::UpdateSelections {
2945 selections,
2946 lamport_timestamp,
2947 line_mode,
2948 cursor_shape,
2949 } => {
2950 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2951 && set.lamport_timestamp > lamport_timestamp
2952 {
2953 return;
2954 }
2955
2956 self.remote_selections.insert(
2957 lamport_timestamp.replica_id,
2958 SelectionSet {
2959 selections,
2960 lamport_timestamp,
2961 line_mode,
2962 cursor_shape,
2963 },
2964 );
2965 self.text.lamport_clock.observe(lamport_timestamp);
2966 self.non_text_state_update_count += 1;
2967 }
2968 Operation::UpdateCompletionTriggers {
2969 triggers,
2970 lamport_timestamp,
2971 server_id,
2972 } => {
2973 if triggers.is_empty() {
2974 self.completion_triggers_per_language_server
2975 .remove(&server_id);
2976 self.completion_triggers = self
2977 .completion_triggers_per_language_server
2978 .values()
2979 .flat_map(|triggers| triggers.iter().cloned())
2980 .collect();
2981 } else {
2982 self.completion_triggers_per_language_server
2983 .insert(server_id, triggers.iter().cloned().collect());
2984 self.completion_triggers.extend(triggers);
2985 }
2986 self.text.lamport_clock.observe(lamport_timestamp);
2987 }
2988 Operation::UpdateLineEnding {
2989 line_ending,
2990 lamport_timestamp,
2991 } => {
2992 self.text.set_line_ending(line_ending);
2993 self.text.lamport_clock.observe(lamport_timestamp);
2994 }
2995 }
2996 }
2997
2998 fn apply_diagnostic_update(
2999 &mut self,
3000 server_id: LanguageServerId,
3001 diagnostics: DiagnosticSet,
3002 lamport_timestamp: clock::Lamport,
3003 cx: &mut Context<Self>,
3004 ) {
3005 if lamport_timestamp > self.diagnostics_timestamp {
3006 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
3007 if diagnostics.is_empty() {
3008 if let Ok(ix) = ix {
3009 self.diagnostics.remove(ix);
3010 }
3011 } else {
3012 match ix {
3013 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
3014 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
3015 };
3016 }
3017 self.diagnostics_timestamp = lamport_timestamp;
3018 self.non_text_state_update_count += 1;
3019 self.text.lamport_clock.observe(lamport_timestamp);
3020 cx.notify();
3021 cx.emit(BufferEvent::DiagnosticsUpdated);
3022 }
3023 }
3024
3025 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
3026 self.was_changed();
3027 cx.emit(BufferEvent::Operation {
3028 operation,
3029 is_local,
3030 });
3031 }
3032
3033 /// Removes the selections for a given peer.
3034 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
3035 self.remote_selections.remove(&replica_id);
3036 cx.notify();
3037 }
3038
3039 /// Undoes the most recent transaction.
3040 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3041 let was_dirty = self.is_dirty();
3042 let old_version = self.version.clone();
3043
3044 if let Some((transaction_id, operation)) = self.text.undo() {
3045 self.send_operation(Operation::Buffer(operation), true, cx);
3046 self.did_edit(&old_version, was_dirty, cx);
3047 Some(transaction_id)
3048 } else {
3049 None
3050 }
3051 }
3052
3053 /// Manually undoes a specific transaction in the buffer's undo history.
3054 pub fn undo_transaction(
3055 &mut self,
3056 transaction_id: TransactionId,
3057 cx: &mut Context<Self>,
3058 ) -> bool {
3059 let was_dirty = self.is_dirty();
3060 let old_version = self.version.clone();
3061 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3062 self.send_operation(Operation::Buffer(operation), true, cx);
3063 self.did_edit(&old_version, was_dirty, cx);
3064 true
3065 } else {
3066 false
3067 }
3068 }
3069
3070 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3071 pub fn undo_to_transaction(
3072 &mut self,
3073 transaction_id: TransactionId,
3074 cx: &mut Context<Self>,
3075 ) -> bool {
3076 let was_dirty = self.is_dirty();
3077 let old_version = self.version.clone();
3078
3079 let operations = self.text.undo_to_transaction(transaction_id);
3080 let undone = !operations.is_empty();
3081 for operation in operations {
3082 self.send_operation(Operation::Buffer(operation), true, cx);
3083 }
3084 if undone {
3085 self.did_edit(&old_version, was_dirty, cx)
3086 }
3087 undone
3088 }
3089
3090 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3091 let was_dirty = self.is_dirty();
3092 let operation = self.text.undo_operations(counts);
3093 let old_version = self.version.clone();
3094 self.send_operation(Operation::Buffer(operation), true, cx);
3095 self.did_edit(&old_version, was_dirty, cx);
3096 }
3097
    /// Redoes the most recently undone transaction.
3099 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3100 let was_dirty = self.is_dirty();
3101 let old_version = self.version.clone();
3102
3103 if let Some((transaction_id, operation)) = self.text.redo() {
3104 self.send_operation(Operation::Buffer(operation), true, cx);
3105 self.did_edit(&old_version, was_dirty, cx);
3106 Some(transaction_id)
3107 } else {
3108 None
3109 }
3110 }
3111
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3113 pub fn redo_to_transaction(
3114 &mut self,
3115 transaction_id: TransactionId,
3116 cx: &mut Context<Self>,
3117 ) -> bool {
3118 let was_dirty = self.is_dirty();
3119 let old_version = self.version.clone();
3120
3121 let operations = self.text.redo_to_transaction(transaction_id);
3122 let redone = !operations.is_empty();
3123 for operation in operations {
3124 self.send_operation(Operation::Buffer(operation), true, cx);
3125 }
3126 if redone {
3127 self.did_edit(&old_version, was_dirty, cx)
3128 }
3129 redone
3130 }
3131
3132 /// Override current completion triggers with the user-provided completion triggers.
3133 pub fn set_completion_triggers(
3134 &mut self,
3135 server_id: LanguageServerId,
3136 triggers: BTreeSet<String>,
3137 cx: &mut Context<Self>,
3138 ) {
3139 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3140 if triggers.is_empty() {
3141 self.completion_triggers_per_language_server
3142 .remove(&server_id);
3143 self.completion_triggers = self
3144 .completion_triggers_per_language_server
3145 .values()
3146 .flat_map(|triggers| triggers.iter().cloned())
3147 .collect();
3148 } else {
3149 self.completion_triggers_per_language_server
3150 .insert(server_id, triggers.clone());
3151 self.completion_triggers.extend(triggers.iter().cloned());
3152 }
3153 self.send_operation(
3154 Operation::UpdateCompletionTriggers {
3155 triggers: triggers.into_iter().collect(),
3156 lamport_timestamp: self.completion_triggers_timestamp,
3157 server_id,
3158 },
3159 true,
3160 cx,
3161 );
3162 cx.notify();
3163 }
3164
3165 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by the LSP server, which returns a list of trigger characters for completions.
3167 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3168 &self.completion_triggers
3169 }
3170
3171 /// Call this directly after performing edits to prevent the preview tab
3172 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3173 /// to return false until there are additional edits.
3174 pub fn refresh_preview(&mut self) {
3175 self.preview_version = self.version.clone();
3176 }
3177
3178 /// Whether we should preserve the preview status of a tab containing this buffer.
3179 pub fn preserve_preview(&self) -> bool {
3180 !self.has_edits_since(&self.preview_version)
3181 }
3182}
3183
3184#[doc(hidden)]
3185#[cfg(any(test, feature = "test-support"))]
3186impl Buffer {
3187 pub fn edit_via_marked_text(
3188 &mut self,
3189 marked_string: &str,
3190 autoindent_mode: Option<AutoindentMode>,
3191 cx: &mut Context<Self>,
3192 ) {
3193 let edits = self.edits_for_marked_text(marked_string);
3194 self.edit(edits, autoindent_mode, cx);
3195 }
3196
3197 pub fn set_group_interval(&mut self, group_interval: Duration) {
3198 self.text.set_group_interval(group_interval);
3199 }
3200
3201 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3202 where
3203 T: rand::Rng,
3204 {
3205 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3206 let mut last_end = None;
3207 for _ in 0..old_range_count {
3208 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3209 break;
3210 }
3211
3212 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3213 let mut range = self.random_byte_range(new_start, rng);
3214 if rng.random_bool(0.2) {
3215 mem::swap(&mut range.start, &mut range.end);
3216 }
3217 last_end = Some(range.end);
3218
3219 let new_text_len = rng.random_range(0..10);
3220 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3221 new_text = new_text.to_uppercase();
3222
3223 edits.push((range, new_text));
3224 }
3225 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3226 self.edit(edits, None, cx);
3227 }
3228
3229 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3230 let was_dirty = self.is_dirty();
3231 let old_version = self.version.clone();
3232
3233 let ops = self.text.randomly_undo_redo(rng);
3234 if !ops.is_empty() {
3235 for op in ops {
3236 self.send_operation(Operation::Buffer(op), true, cx);
3237 self.did_edit(&old_version, was_dirty, cx);
3238 }
3239 }
3240 }
3241}
3242
3243impl EventEmitter<BufferEvent> for Buffer {}
3244
3245impl Deref for Buffer {
3246 type Target = TextBuffer;
3247
3248 fn deref(&self) -> &Self::Target {
3249 &self.text
3250 }
3251}
3252
3253impl BufferSnapshot {
3254 /// Returns [`IndentSize`] for a given line that respects user settings and
3255 /// language preferences.
3256 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3257 indent_size_for_line(self, row)
3258 }
3259
3260 /// Returns [`IndentSize`] for a given position that respects user settings
3261 /// and language preferences.
3262 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3263 let settings = language_settings(
3264 self.language_at(position).map(|l| l.name()),
3265 self.file(),
3266 cx,
3267 );
3268 if settings.hard_tabs {
3269 IndentSize::tab()
3270 } else {
3271 IndentSize::spaces(settings.tab_size.get())
3272 }
3273 }
3274
3275 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3276 /// is passed in as `single_indent_size`.
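    ///
    /// A minimal sketch (not compiled as a doctest), assuming a
    /// `snapshot: BufferSnapshot` is in scope:
    ///
    /// ```ignore
    /// // Suggested indents for the first five rows, using 4-space indentation
    /// // as the unit.
    /// let suggestions = snapshot.suggested_indents(0..5, IndentSize::spaces(4));
    /// for (row, indent) in suggestions {
    ///     println!("row {row}: {} columns", indent.len);
    /// }
    /// ```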
3277 pub fn suggested_indents(
3278 &self,
3279 rows: impl Iterator<Item = u32>,
3280 single_indent_size: IndentSize,
3281 ) -> BTreeMap<u32, IndentSize> {
3282 let mut result = BTreeMap::new();
3283
3284 for row_range in contiguous_ranges(rows, 10) {
3285 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3286 Some(suggestions) => suggestions,
3287 _ => break,
3288 };
3289
3290 for (row, suggestion) in row_range.zip(suggestions) {
3291 let indent_size = if let Some(suggestion) = suggestion {
3292 result
3293 .get(&suggestion.basis_row)
3294 .copied()
3295 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3296 .with_delta(suggestion.delta, single_indent_size)
3297 } else {
3298 self.indent_size_for_line(row)
3299 };
3300
3301 result.insert(row, indent_size);
3302 }
3303 }
3304
3305 result
3306 }
3307
3308 fn suggest_autoindents(
3309 &self,
3310 row_range: Range<u32>,
3311 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3312 let config = &self.language.as_ref()?.config;
3313 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3314
3315 #[derive(Debug, Clone)]
3316 struct StartPosition {
3317 start: Point,
3318 suffix: SharedString,
3319 language: Arc<Language>,
3320 }
3321
3322 // Find the suggested indentation ranges based on the syntax tree.
3323 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3324 let end = Point::new(row_range.end, 0);
3325 let range = (start..end).to_offset(&self.text);
3326 let mut matches = self.syntax.matches_with_options(
3327 range.clone(),
3328 &self.text,
3329 TreeSitterOptions {
3330 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3331 max_start_depth: None,
3332 },
3333 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3334 );
3335 let indent_configs = matches
3336 .grammars()
3337 .iter()
3338 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3339 .collect::<Vec<_>>();
3340
3341 let mut indent_ranges = Vec::<Range<Point>>::new();
3342 let mut start_positions = Vec::<StartPosition>::new();
3343 let mut outdent_positions = Vec::<Point>::new();
3344 while let Some(mat) = matches.peek() {
3345 let mut start: Option<Point> = None;
3346 let mut end: Option<Point> = None;
3347
3348 let config = indent_configs[mat.grammar_index];
3349 for capture in mat.captures {
3350 if capture.index == config.indent_capture_ix {
3351 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3352 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3353 } else if Some(capture.index) == config.start_capture_ix {
3354 start = Some(Point::from_ts_point(capture.node.end_position()));
3355 } else if Some(capture.index) == config.end_capture_ix {
3356 end = Some(Point::from_ts_point(capture.node.start_position()));
3357 } else if Some(capture.index) == config.outdent_capture_ix {
3358 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3359 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3360 start_positions.push(StartPosition {
3361 start: Point::from_ts_point(capture.node.start_position()),
3362 suffix: suffix.clone(),
3363 language: mat.language.clone(),
3364 });
3365 }
3366 }
3367
3368 matches.advance();
3369 if let Some((start, end)) = start.zip(end) {
3370 if start.row == end.row {
3371 continue;
3372 }
3373 let range = start..end;
3374 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3375 Err(ix) => indent_ranges.insert(ix, range),
3376 Ok(ix) => {
3377 let prev_range = &mut indent_ranges[ix];
3378 prev_range.end = prev_range.end.max(range.end);
3379 }
3380 }
3381 }
3382 }
3383
3384 let mut error_ranges = Vec::<Range<Point>>::new();
3385 let mut matches = self
3386 .syntax
3387 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3388 while let Some(mat) = matches.peek() {
3389 let node = mat.captures[0].node;
3390 let start = Point::from_ts_point(node.start_position());
3391 let end = Point::from_ts_point(node.end_position());
3392 let range = start..end;
3393 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3394 Ok(ix) | Err(ix) => ix,
3395 };
3396 let mut end_ix = ix;
3397 while let Some(existing_range) = error_ranges.get(end_ix) {
3398 if existing_range.end < end {
3399 end_ix += 1;
3400 } else {
3401 break;
3402 }
3403 }
3404 error_ranges.splice(ix..end_ix, [range]);
3405 matches.advance();
3406 }
3407
3408 outdent_positions.sort();
3409 for outdent_position in outdent_positions {
3410 // find the innermost indent range containing this outdent_position
3411 // set its end to the outdent position
3412 if let Some(range_to_truncate) = indent_ranges
3413 .iter_mut()
3414 .rfind(|indent_range| indent_range.contains(&outdent_position))
3415 {
3416 range_to_truncate.end = outdent_position;
3417 }
3418 }
3419
3420 start_positions.sort_by_key(|b| b.start);
3421
        // Find the suggested indentation increases and decreases based on regexes.
3423 let mut regex_outdent_map = HashMap::default();
3424 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3425 let mut start_positions_iter = start_positions.iter().peekable();
3426
3427 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3428 self.for_each_line(
3429 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3430 ..Point::new(row_range.end, 0),
3431 |row, line| {
3432 let indent_len = self.indent_size_for_line(row).len;
3433 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3434 let row_language_config = row_language
3435 .as_ref()
3436 .map(|lang| lang.config())
3437 .unwrap_or(config);
3438
3439 if row_language_config
3440 .decrease_indent_pattern
3441 .as_ref()
3442 .is_some_and(|regex| regex.is_match(line))
3443 {
3444 indent_change_rows.push((row, Ordering::Less));
3445 }
3446 if row_language_config
3447 .increase_indent_pattern
3448 .as_ref()
3449 .is_some_and(|regex| regex.is_match(line))
3450 {
3451 indent_change_rows.push((row + 1, Ordering::Greater));
3452 }
3453 while let Some(pos) = start_positions_iter.peek() {
3454 if pos.start.row < row {
3455 let pos = start_positions_iter.next().unwrap().clone();
3456 last_seen_suffix
3457 .entry(pos.suffix.to_string())
3458 .or_default()
3459 .push(pos);
3460 } else {
3461 break;
3462 }
3463 }
3464 for rule in &row_language_config.decrease_indent_patterns {
3465 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3466 let row_start_column = self.indent_size_for_line(row).len;
3467 let basis_row = rule
3468 .valid_after
3469 .iter()
3470 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3471 .flatten()
3472 .filter(|pos| {
3473 row_language
3474 .as_ref()
3475 .or(self.language.as_ref())
3476 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3477 })
3478 .filter(|pos| pos.start.column <= row_start_column)
3479 .max_by_key(|pos| pos.start.row);
3480 if let Some(outdent_to) = basis_row {
3481 regex_outdent_map.insert(row, outdent_to.start.row);
3482 }
3483 break;
3484 }
3485 }
3486 },
3487 );
3488
3489 let mut indent_changes = indent_change_rows.into_iter().peekable();
3490 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3491 prev_non_blank_row.unwrap_or(0)
3492 } else {
3493 row_range.start.saturating_sub(1)
3494 };
3495
3496 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
3497 Some(row_range.map(move |row| {
3498 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3499
3500 let mut indent_from_prev_row = false;
3501 let mut outdent_from_prev_row = false;
3502 let mut outdent_to_row = u32::MAX;
3503 let mut from_regex = false;
3504
3505 while let Some((indent_row, delta)) = indent_changes.peek() {
3506 match indent_row.cmp(&row) {
3507 Ordering::Equal => match delta {
3508 Ordering::Less => {
3509 from_regex = true;
3510 outdent_from_prev_row = true
3511 }
3512 Ordering::Greater => {
3513 indent_from_prev_row = true;
3514 from_regex = true
3515 }
3516 _ => {}
3517 },
3518
3519 Ordering::Greater => break,
3520 Ordering::Less => {}
3521 }
3522
3523 indent_changes.next();
3524 }
3525
3526 for range in &indent_ranges {
3527 if range.start.row >= row {
3528 break;
3529 }
3530 if range.start.row == prev_row && range.end > row_start {
3531 indent_from_prev_row = true;
3532 }
3533 if range.end > prev_row_start && range.end <= row_start {
3534 outdent_to_row = outdent_to_row.min(range.start.row);
3535 }
3536 }
3537
3538 if let Some(basis_row) = regex_outdent_map.get(&row) {
3539 indent_from_prev_row = false;
3540 outdent_to_row = *basis_row;
3541 from_regex = true;
3542 }
3543
3544 let within_error = error_ranges
3545 .iter()
3546 .any(|e| e.start.row < row && e.end > row_start);
3547
3548 let suggestion = if outdent_to_row == prev_row
3549 || (outdent_from_prev_row && indent_from_prev_row)
3550 {
3551 Some(IndentSuggestion {
3552 basis_row: prev_row,
3553 delta: Ordering::Equal,
3554 within_error: within_error && !from_regex,
3555 })
3556 } else if indent_from_prev_row {
3557 Some(IndentSuggestion {
3558 basis_row: prev_row,
3559 delta: Ordering::Greater,
3560 within_error: within_error && !from_regex,
3561 })
3562 } else if outdent_to_row < prev_row {
3563 Some(IndentSuggestion {
3564 basis_row: outdent_to_row,
3565 delta: Ordering::Equal,
3566 within_error: within_error && !from_regex,
3567 })
3568 } else if outdent_from_prev_row {
3569 Some(IndentSuggestion {
3570 basis_row: prev_row,
3571 delta: Ordering::Less,
3572 within_error: within_error && !from_regex,
3573 })
3574 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3575 {
3576 Some(IndentSuggestion {
3577 basis_row: prev_row,
3578 delta: Ordering::Equal,
3579 within_error: within_error && !from_regex,
3580 })
3581 } else {
3582 None
3583 };
3584
3585 prev_row = row;
3586 prev_row_start = row_start;
3587 suggestion
3588 }))
3589 }
3590
3591 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3592 while row > 0 {
3593 row -= 1;
3594 if !self.is_line_blank(row) {
3595 return Some(row);
3596 }
3597 }
3598 None
3599 }
3600
3601 #[ztracing::instrument(skip_all)]
3602 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3603 let captures = self.syntax.captures(range, &self.text, |grammar| {
3604 grammar
3605 .highlights_config
3606 .as_ref()
3607 .map(|config| &config.query)
3608 });
3609 let highlight_maps = captures
3610 .grammars()
3611 .iter()
3612 .map(|grammar| grammar.highlight_map())
3613 .collect();
3614 (captures, highlight_maps)
3615 }
3616
    /// Iterates over chunks of text in the given range of the buffer. Chunk
    /// boundaries are arbitrary, because the text is stored in a [`Rope`](text::Rope).
    /// Additionally, each returned chunk has a single syntax highlighting style and a
    /// single diagnostic status.
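    ///
    /// A minimal usage sketch (not compiled), assuming a `BufferSnapshot` named
    /// `snapshot` is in scope; it concatenates the highlighted chunks of the
    /// buffer's first 100 bytes back into a `String`:
    ///
    /// ```ignore
    /// let end = snapshot.len().min(100);
    /// let mut text = String::new();
    /// for chunk in snapshot.chunks(0..end, true) {
    ///     // `chunk.syntax_highlight_id` carries the highlight for this slice of text.
    ///     text.push_str(chunk.text);
    /// }
    /// ```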
3621 #[ztracing::instrument(skip_all)]
3622 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3623 let range = range.start.to_offset(self)..range.end.to_offset(self);
3624
3625 let mut syntax = None;
3626 if language_aware {
3627 syntax = Some(self.get_highlights(range.clone()));
3628 }
3629 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3630 let diagnostics = language_aware;
3631 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3632 }
3633
3634 pub fn highlighted_text_for_range<T: ToOffset>(
3635 &self,
3636 range: Range<T>,
3637 override_style: Option<HighlightStyle>,
3638 syntax_theme: &SyntaxTheme,
3639 ) -> HighlightedText {
3640 HighlightedText::from_buffer_range(
3641 range,
3642 &self.text,
3643 &self.syntax,
3644 override_style,
3645 syntax_theme,
3646 )
3647 }
3648
3649 /// Invokes the given callback for each line of text in the given range of the buffer.
    /// Uses a callback to avoid allocating a new string for each line.
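    ///
    /// A small sketch (not compiled), assuming a snapshot `snapshot` with at least
    /// ten rows; it counts the non-blank lines among the first ten:
    ///
    /// ```ignore
    /// let mut non_blank = 0;
    /// snapshot.for_each_line(Point::new(0, 0)..Point::new(10, 0), |_row, line| {
    ///     if !line.trim().is_empty() {
    ///         non_blank += 1;
    ///     }
    /// });
    /// ```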
3651 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3652 let mut line = String::new();
3653 let mut row = range.start.row;
3654 for chunk in self
3655 .as_rope()
3656 .chunks_in_range(range.to_offset(self))
3657 .chain(["\n"])
3658 {
3659 for (newline_ix, text) in chunk.split('\n').enumerate() {
3660 if newline_ix > 0 {
3661 callback(row, &line);
3662 row += 1;
3663 line.clear();
3664 }
3665 line.push_str(text);
3666 }
3667 }
3668 }
3669
3670 /// Iterates over every [`SyntaxLayer`] in the buffer.
3671 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3672 self.syntax_layers_for_range(0..self.len(), true)
3673 }
3674
3675 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3676 let offset = position.to_offset(self);
3677 self.syntax_layers_for_range(offset..offset, false)
3678 .filter(|l| {
3679 if let Some(ranges) = l.included_sub_ranges {
3680 ranges.iter().any(|range| {
3681 let start = range.start.to_offset(self);
3682 start <= offset && {
3683 let end = range.end.to_offset(self);
3684 offset < end
3685 }
3686 })
3687 } else {
3688 l.node().start_byte() <= offset && l.node().end_byte() > offset
3689 }
3690 })
3691 .last()
3692 }
3693
3694 pub fn syntax_layers_for_range<D: ToOffset>(
3695 &self,
3696 range: Range<D>,
3697 include_hidden: bool,
3698 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3699 self.syntax
3700 .layers_for_range(range, &self.text, include_hidden)
3701 }
3702
3703 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3704 &self,
3705 range: Range<D>,
3706 ) -> Option<SyntaxLayer<'_>> {
3707 let range = range.to_offset(self);
3708 self.syntax
3709 .layers_for_range(range, &self.text, false)
3710 .max_by(|a, b| {
3711 if a.depth != b.depth {
3712 a.depth.cmp(&b.depth)
3713 } else if a.offset.0 != b.offset.0 {
3714 a.offset.0.cmp(&b.offset.0)
3715 } else {
3716 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3717 }
3718 })
3719 }
3720
3721 /// Returns the main [`Language`].
3722 pub fn language(&self) -> Option<&Arc<Language>> {
3723 self.language.as_ref()
3724 }
3725
3726 /// Returns the [`Language`] at the given location.
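    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot` and a cursor offset
    /// `offset`; outside of injected layers this falls back to the buffer's primary
    /// language:
    ///
    /// ```ignore
    /// let language_name = snapshot.language_at(offset).map(|language| language.name());
    /// ```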
3727 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3728 self.syntax_layer_at(position)
3729 .map(|info| info.language)
3730 .or(self.language.as_ref())
3731 }
3732
3733 /// Returns the settings for the language at the given location.
3734 pub fn settings_at<'a, D: ToOffset>(
3735 &'a self,
3736 position: D,
3737 cx: &'a App,
3738 ) -> Cow<'a, LanguageSettings> {
3739 language_settings(
3740 self.language_at(position).map(|l| l.name()),
3741 self.file.as_ref(),
3742 cx,
3743 )
3744 }
3745
3746 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3747 CharClassifier::new(self.language_scope_at(point))
3748 }
3749
3750 /// Returns the [`LanguageScope`] at the given location.
3751 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3752 let offset = position.to_offset(self);
3753 let mut scope = None;
3754 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3755
3756 // Use the layer that has the smallest node intersecting the given point.
3757 for layer in self
3758 .syntax
3759 .layers_for_range(offset..offset, &self.text, false)
3760 {
3761 let mut cursor = layer.node().walk();
3762
3763 let mut range = None;
3764 loop {
3765 let child_range = cursor.node().byte_range();
3766 if !child_range.contains(&offset) {
3767 break;
3768 }
3769
3770 range = Some(child_range);
3771 if cursor.goto_first_child_for_byte(offset).is_none() {
3772 break;
3773 }
3774 }
3775
3776 if let Some(range) = range
3777 && smallest_range_and_depth.as_ref().is_none_or(
3778 |(smallest_range, smallest_range_depth)| {
3779 if layer.depth > *smallest_range_depth {
3780 true
3781 } else if layer.depth == *smallest_range_depth {
3782 range.len() < smallest_range.len()
3783 } else {
3784 false
3785 }
3786 },
3787 )
3788 {
3789 smallest_range_and_depth = Some((range, layer.depth));
3790 scope = Some(LanguageScope {
3791 language: layer.language.clone(),
3792 override_id: layer.override_id(offset, &self.text),
3793 });
3794 }
3795 }
3796
3797 scope.or_else(|| {
3798 self.language.clone().map(|language| LanguageScope {
3799 language,
3800 override_id: None,
3801 })
3802 })
3803 }
3804
3805 /// Returns a tuple of the range and character kind of the word
3806 /// surrounding the given position.
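    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot` and a cursor offset
    /// `offset`; no special scope context is supplied:
    ///
    /// ```ignore
    /// let (word_range, _kind) = snapshot.surrounding_word(offset, None);
    /// let word: String = snapshot.text_for_range(word_range).collect();
    /// ```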
3807 pub fn surrounding_word<T: ToOffset>(
3808 &self,
3809 start: T,
3810 scope_context: Option<CharScopeContext>,
3811 ) -> (Range<usize>, Option<CharKind>) {
3812 let mut start = start.to_offset(self);
3813 let mut end = start;
3814 let mut next_chars = self.chars_at(start).take(128).peekable();
3815 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3816
3817 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3818 let word_kind = cmp::max(
3819 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3820 next_chars.peek().copied().map(|c| classifier.kind(c)),
3821 );
3822
3823 for ch in prev_chars {
3824 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3825 start -= ch.len_utf8();
3826 } else {
3827 break;
3828 }
3829 }
3830
3831 for ch in next_chars {
3832 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3833 end += ch.len_utf8();
3834 } else {
3835 break;
3836 }
3837 }
3838
3839 (start..end, word_kind)
3840 }
3841
3842 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3843 /// range. When `require_larger` is true, the node found must be larger than the query range.
3844 ///
3845 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3846 /// be moved to the root of the tree.
3847 fn goto_node_enclosing_range(
3848 cursor: &mut tree_sitter::TreeCursor,
3849 query_range: &Range<usize>,
3850 require_larger: bool,
3851 ) -> bool {
3852 let mut ascending = false;
3853 loop {
3854 let mut range = cursor.node().byte_range();
3855 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3858 if range.start > query_range.start {
3859 cursor.goto_previous_sibling();
3860 range = cursor.node().byte_range();
3861 }
3862 } else {
3863 // When the query range is non-empty and the current node ends exactly at the start,
3864 // move to the next sibling to find a node that extends beyond the start.
3865 if range.end == query_range.start {
3866 cursor.goto_next_sibling();
3867 range = cursor.node().byte_range();
3868 }
3869 }
3870
3871 let encloses = range.contains_inclusive(query_range)
3872 && (!require_larger || range.len() > query_range.len());
3873 if !encloses {
3874 ascending = true;
3875 if !cursor.goto_parent() {
3876 return false;
3877 }
3878 continue;
3879 } else if ascending {
3880 return true;
3881 }
3882
3883 // Descend into the current node.
3884 if cursor
3885 .goto_first_child_for_byte(query_range.start)
3886 .is_none()
3887 {
3888 return true;
3889 }
3890 }
3891 }
3892
3893 pub fn syntax_ancestor<'a, T: ToOffset>(
3894 &'a self,
3895 range: Range<T>,
3896 ) -> Option<tree_sitter::Node<'a>> {
3897 let range = range.start.to_offset(self)..range.end.to_offset(self);
3898 let mut result: Option<tree_sitter::Node<'a>> = None;
3899 for layer in self
3900 .syntax
3901 .layers_for_range(range.clone(), &self.text, true)
3902 {
3903 let mut cursor = layer.node().walk();
3904
3905 // Find the node that both contains the range and is larger than it.
3906 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3907 continue;
3908 }
3909
3910 let left_node = cursor.node();
3911 let mut layer_result = left_node;
3912
3913 // For an empty range, try to find another node immediately to the right of the range.
3914 if left_node.end_byte() == range.start {
3915 let mut right_node = None;
3916 while !cursor.goto_next_sibling() {
3917 if !cursor.goto_parent() {
3918 break;
3919 }
3920 }
3921
3922 while cursor.node().start_byte() == range.start {
3923 right_node = Some(cursor.node());
3924 if !cursor.goto_first_child() {
3925 break;
3926 }
3927 }
3928
3929 // If there is a candidate node on both sides of the (empty) range, then
3930 // decide between the two by favoring a named node over an anonymous token.
3931 // If both nodes are the same in that regard, favor the right one.
3932 if let Some(right_node) = right_node
3933 && (right_node.is_named() || !left_node.is_named())
3934 {
3935 layer_result = right_node;
3936 }
3937 }
3938
3939 if let Some(previous_result) = &result
3940 && previous_result.byte_range().len() < layer_result.byte_range().len()
3941 {
3942 continue;
3943 }
3944 result = Some(layer_result);
3945 }
3946
3947 result
3948 }
3949
3950 /// Find the previous sibling syntax node at the given range.
3951 ///
3952 /// This function locates the syntax node that precedes the node containing
3953 /// the given range. It searches hierarchically by:
3954 /// 1. Finding the node that contains the given range
3955 /// 2. Looking for the previous sibling at the same tree level
3956 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3957 ///
3958 /// Returns `None` if there is no previous sibling at any ancestor level.
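    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot` and a byte range
    /// `selection` covering the current node:
    ///
    /// ```ignore
    /// if let Some(prev) = snapshot.syntax_prev_sibling(selection.clone()) {
    ///     // `prev.byte_range()` is where a caller could move the selection next.
    ///     let _target = prev.byte_range();
    /// }
    /// ```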
3959 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3960 &'a self,
3961 range: Range<T>,
3962 ) -> Option<tree_sitter::Node<'a>> {
3963 let range = range.start.to_offset(self)..range.end.to_offset(self);
3964 let mut result: Option<tree_sitter::Node<'a>> = None;
3965
3966 for layer in self
3967 .syntax
3968 .layers_for_range(range.clone(), &self.text, true)
3969 {
3970 let mut cursor = layer.node().walk();
3971
3972 // Find the node that contains the range
3973 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3974 continue;
3975 }
3976
3977 // Look for the previous sibling, moving up ancestor levels if needed
3978 loop {
3979 if cursor.goto_previous_sibling() {
3980 let layer_result = cursor.node();
3981
3982 if let Some(previous_result) = &result {
3983 if previous_result.byte_range().end < layer_result.byte_range().end {
3984 continue;
3985 }
3986 }
3987 result = Some(layer_result);
3988 break;
3989 }
3990
3991 // No sibling found at this level, try moving up to parent
3992 if !cursor.goto_parent() {
3993 break;
3994 }
3995 }
3996 }
3997
3998 result
3999 }
4000
4001 /// Find the next sibling syntax node at the given range.
4002 ///
4003 /// This function locates the syntax node that follows the node containing
4004 /// the given range. It searches hierarchically by:
4005 /// 1. Finding the node that contains the given range
4006 /// 2. Looking for the next sibling at the same tree level
4007 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
4008 ///
4009 /// Returns `None` if there is no next sibling at any ancestor level.
4010 pub fn syntax_next_sibling<'a, T: ToOffset>(
4011 &'a self,
4012 range: Range<T>,
4013 ) -> Option<tree_sitter::Node<'a>> {
4014 let range = range.start.to_offset(self)..range.end.to_offset(self);
4015 let mut result: Option<tree_sitter::Node<'a>> = None;
4016
4017 for layer in self
4018 .syntax
4019 .layers_for_range(range.clone(), &self.text, true)
4020 {
4021 let mut cursor = layer.node().walk();
4022
4023 // Find the node that contains the range
4024 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
4025 continue;
4026 }
4027
4028 // Look for the next sibling, moving up ancestor levels if needed
4029 loop {
4030 if cursor.goto_next_sibling() {
4031 let layer_result = cursor.node();
4032
4033 if let Some(previous_result) = &result {
4034 if previous_result.byte_range().start > layer_result.byte_range().start {
4035 continue;
4036 }
4037 }
4038 result = Some(layer_result);
4039 break;
4040 }
4041
4042 // No sibling found at this level, try moving up to parent
4043 if !cursor.goto_parent() {
4044 break;
4045 }
4046 }
4047 }
4048
4049 result
4050 }
4051
4052 /// Returns the root syntax node within the given row
4053 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
4054 let start_offset = position.to_offset(self);
4055
4056 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
4057
4058 let layer = self
4059 .syntax
4060 .layers_for_range(start_offset..start_offset, &self.text, true)
4061 .next()?;
4062
4063 let mut cursor = layer.node().walk();
4064
4065 // Descend to the first leaf that touches the start of the range.
4066 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4067 if cursor.node().end_byte() == start_offset {
4068 cursor.goto_next_sibling();
4069 }
4070 }
4071
4072 // Ascend to the root node within the same row.
4073 while cursor.goto_parent() {
4074 if cursor.node().start_position().row != row {
4075 break;
4076 }
4077 }
4078
4079 Some(cursor.node())
4080 }
4081
4082 /// Returns the outline for the buffer.
4083 ///
4084 /// This method allows passing an optional [`SyntaxTheme`] to
4085 /// syntax-highlight the returned symbols.
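    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot`; it prints an
    /// indented outline using the same items this method wraps, without syntax
    /// highlighting (`None` for the theme):
    ///
    /// ```ignore
    /// for item in snapshot.outline_items_containing(0..snapshot.len(), true, None) {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```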
4086 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4087 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4088 }
4089
4090 /// Returns all the symbols that contain the given position.
4091 ///
4092 /// This method allows passing an optional [`SyntaxTheme`] to
4093 /// syntax-highlight the returned symbols.
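    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot` and a cursor offset
    /// `offset`; the returned items run from the outermost symbol to the innermost:
    ///
    /// ```ignore
    /// let breadcrumbs: Vec<String> = snapshot
    ///     .symbols_containing(offset, None)
    ///     .into_iter()
    ///     .map(|item| item.text)
    ///     .collect();
    /// ```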
4094 pub fn symbols_containing<T: ToOffset>(
4095 &self,
4096 position: T,
4097 theme: Option<&SyntaxTheme>,
4098 ) -> Vec<OutlineItem<Anchor>> {
4099 let position = position.to_offset(self);
4100 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4101 let end = self.clip_offset(position + 1, Bias::Right);
4102 let mut items = self.outline_items_containing(start..end, false, theme);
4103 let mut prev_depth = None;
4104 items.retain(|item| {
4105 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4106 prev_depth = Some(item.depth);
4107 result
4108 });
4109 items
4110 }
4111
4112 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4113 let range = range.to_offset(self);
4114 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4115 grammar.outline_config.as_ref().map(|c| &c.query)
4116 });
4117 let configs = matches
4118 .grammars()
4119 .iter()
4120 .map(|g| g.outline_config.as_ref().unwrap())
4121 .collect::<Vec<_>>();
4122
4123 while let Some(mat) = matches.peek() {
4124 let config = &configs[mat.grammar_index];
4125 let containing_item_node = maybe!({
4126 let item_node = mat.captures.iter().find_map(|cap| {
4127 if cap.index == config.item_capture_ix {
4128 Some(cap.node)
4129 } else {
4130 None
4131 }
4132 })?;
4133
4134 let item_byte_range = item_node.byte_range();
4135 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4136 None
4137 } else {
4138 Some(item_node)
4139 }
4140 });
4141
4142 if let Some(item_node) = containing_item_node {
4143 return Some(
4144 Point::from_ts_point(item_node.start_position())
4145 ..Point::from_ts_point(item_node.end_position()),
4146 );
4147 }
4148
4149 matches.advance();
4150 }
4151 None
4152 }
4153
4154 pub fn outline_items_containing<T: ToOffset>(
4155 &self,
4156 range: Range<T>,
4157 include_extra_context: bool,
4158 theme: Option<&SyntaxTheme>,
4159 ) -> Vec<OutlineItem<Anchor>> {
4160 self.outline_items_containing_internal(
4161 range,
4162 include_extra_context,
4163 theme,
4164 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4165 )
4166 }
4167
4168 pub fn outline_items_as_points_containing<T: ToOffset>(
4169 &self,
4170 range: Range<T>,
4171 include_extra_context: bool,
4172 theme: Option<&SyntaxTheme>,
4173 ) -> Vec<OutlineItem<Point>> {
4174 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4175 range
4176 })
4177 }
4178
4179 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4180 &self,
4181 range: Range<T>,
4182 include_extra_context: bool,
4183 theme: Option<&SyntaxTheme>,
4184 ) -> Vec<OutlineItem<usize>> {
4185 self.outline_items_containing_internal(
4186 range,
4187 include_extra_context,
4188 theme,
4189 |buffer, range| range.to_offset(buffer),
4190 )
4191 }
4192
4193 fn outline_items_containing_internal<T: ToOffset, U>(
4194 &self,
4195 range: Range<T>,
4196 include_extra_context: bool,
4197 theme: Option<&SyntaxTheme>,
4198 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4199 ) -> Vec<OutlineItem<U>> {
4200 let range = range.to_offset(self);
4201 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4202 grammar.outline_config.as_ref().map(|c| &c.query)
4203 });
4204
4205 let mut items = Vec::new();
4206 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4207 while let Some(mat) = matches.peek() {
4208 let config = matches.grammars()[mat.grammar_index]
4209 .outline_config
4210 .as_ref()
4211 .unwrap();
4212 if let Some(item) =
4213 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4214 {
4215 items.push(item);
4216 } else if let Some(capture) = mat
4217 .captures
4218 .iter()
4219 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4220 {
4221 let capture_range = capture.node.start_position()..capture.node.end_position();
4222 let mut capture_row_range =
4223 capture_range.start.row as u32..capture_range.end.row as u32;
4224 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4225 {
4226 capture_row_range.end -= 1;
4227 }
4228 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4229 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4230 last_row_range.end = capture_row_range.end;
4231 } else {
4232 annotation_row_ranges.push(capture_row_range);
4233 }
4234 } else {
4235 annotation_row_ranges.push(capture_row_range);
4236 }
4237 }
4238 matches.advance();
4239 }
4240
4241 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4242
4243 // Assign depths based on containment relationships and convert to anchors.
4244 let mut item_ends_stack = Vec::<Point>::new();
4245 let mut anchor_items = Vec::new();
4246 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4247 for item in items {
4248 while let Some(last_end) = item_ends_stack.last().copied() {
4249 if last_end < item.range.end {
4250 item_ends_stack.pop();
4251 } else {
4252 break;
4253 }
4254 }
4255
4256 let mut annotation_row_range = None;
4257 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4258 let row_preceding_item = item.range.start.row.saturating_sub(1);
4259 if next_annotation_row_range.end < row_preceding_item {
4260 annotation_row_ranges.next();
4261 } else {
4262 if next_annotation_row_range.end == row_preceding_item {
4263 annotation_row_range = Some(next_annotation_row_range.clone());
4264 annotation_row_ranges.next();
4265 }
4266 break;
4267 }
4268 }
4269
4270 anchor_items.push(OutlineItem {
4271 depth: item_ends_stack.len(),
4272 range: range_callback(self, item.range.clone()),
4273 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4274 text: item.text,
4275 highlight_ranges: item.highlight_ranges,
4276 name_ranges: item.name_ranges,
4277 body_range: item.body_range.map(|r| range_callback(self, r)),
4278 annotation_range: annotation_row_range.map(|annotation_range| {
4279 let point_range = Point::new(annotation_range.start, 0)
4280 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4281 range_callback(self, point_range)
4282 }),
4283 });
4284 item_ends_stack.push(item.range.end);
4285 }
4286
4287 anchor_items
4288 }
4289
4290 fn next_outline_item(
4291 &self,
4292 config: &OutlineConfig,
4293 mat: &SyntaxMapMatch,
4294 range: &Range<usize>,
4295 include_extra_context: bool,
4296 theme: Option<&SyntaxTheme>,
4297 ) -> Option<OutlineItem<Point>> {
4298 let item_node = mat.captures.iter().find_map(|cap| {
4299 if cap.index == config.item_capture_ix {
4300 Some(cap.node)
4301 } else {
4302 None
4303 }
4304 })?;
4305
4306 let item_byte_range = item_node.byte_range();
4307 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4308 return None;
4309 }
4310 let item_point_range = Point::from_ts_point(item_node.start_position())
4311 ..Point::from_ts_point(item_node.end_position());
4312
4313 let mut open_point = None;
4314 let mut close_point = None;
4315
4316 let mut buffer_ranges = Vec::new();
4317 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4318 let mut range = node.start_byte()..node.end_byte();
4319 let start = node.start_position();
4320 if node.end_position().row > start.row {
4321 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4322 }
4323
4324 if !range.is_empty() {
4325 buffer_ranges.push((range, node_is_name));
4326 }
4327 };
4328
4329 for capture in mat.captures {
4330 if capture.index == config.name_capture_ix {
4331 add_to_buffer_ranges(capture.node, true);
4332 } else if Some(capture.index) == config.context_capture_ix
4333 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4334 {
4335 add_to_buffer_ranges(capture.node, false);
4336 } else {
4337 if Some(capture.index) == config.open_capture_ix {
4338 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4339 } else if Some(capture.index) == config.close_capture_ix {
4340 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4341 }
4342 }
4343 }
4344
4345 if buffer_ranges.is_empty() {
4346 return None;
4347 }
4348 let source_range_for_text =
4349 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4350
4351 let mut text = String::new();
4352 let mut highlight_ranges = Vec::new();
4353 let mut name_ranges = Vec::new();
4354 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4355 let mut last_buffer_range_end = 0;
4356 for (buffer_range, is_name) in buffer_ranges {
4357 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4358 if space_added {
4359 text.push(' ');
4360 }
4361 let before_append_len = text.len();
4362 let mut offset = buffer_range.start;
4363 chunks.seek(buffer_range.clone());
4364 for mut chunk in chunks.by_ref() {
4365 if chunk.text.len() > buffer_range.end - offset {
4366 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4367 offset = buffer_range.end;
4368 } else {
4369 offset += chunk.text.len();
4370 }
4371 let style = chunk
4372 .syntax_highlight_id
4373 .zip(theme)
4374 .and_then(|(highlight, theme)| highlight.style(theme));
4375 if let Some(style) = style {
4376 let start = text.len();
4377 let end = start + chunk.text.len();
4378 highlight_ranges.push((start..end, style));
4379 }
4380 text.push_str(chunk.text);
4381 if offset >= buffer_range.end {
4382 break;
4383 }
4384 }
4385 if is_name {
4386 let after_append_len = text.len();
4387 let start = if space_added && !name_ranges.is_empty() {
4388 before_append_len - 1
4389 } else {
4390 before_append_len
4391 };
4392 name_ranges.push(start..after_append_len);
4393 }
4394 last_buffer_range_end = buffer_range.end;
4395 }
4396
4397 Some(OutlineItem {
4398 depth: 0, // We'll calculate the depth later
4399 range: item_point_range,
4400 source_range_for_text: source_range_for_text.to_point(self),
4401 text,
4402 highlight_ranges,
4403 name_ranges,
4404 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4405 annotation_range: None,
4406 })
4407 }
4408
4409 pub fn function_body_fold_ranges<T: ToOffset>(
4410 &self,
4411 within: Range<T>,
4412 ) -> impl Iterator<Item = Range<usize>> + '_ {
4413 self.text_object_ranges(within, TreeSitterOptions::default())
4414 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4415 }
4416
    /// For each grammar in the buffer's syntax layers, runs the [`tree_sitter::Query`]
    /// selected by the given callback against the given range.
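    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot`; it runs each
    /// grammar's outline query over the whole buffer, mirroring how the outline
    /// helpers below drive this API:
    ///
    /// ```ignore
    /// let mut matches = snapshot.matches(0..snapshot.len(), |grammar| {
    ///     grammar.outline_config.as_ref().map(|config| &config.query)
    /// });
    /// while let Some(mat) = matches.peek() {
    ///     // Inspect `mat.captures` here.
    ///     matches.advance();
    /// }
    /// ```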
4419 pub fn matches(
4420 &self,
4421 range: Range<usize>,
4422 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4423 ) -> SyntaxMapMatches<'_> {
4424 self.syntax.matches(range, self, query)
4425 }
4426
4427 /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs that intersect with those chunks.
    /// Hence, it may return more bracket pairs than strictly fall within the range.
    ///
    /// Chunks whose row ranges appear in `known_chunks` are omitted.
4431 /// The resulting bracket match collections are not ordered.
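    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot`; passing `None` means
    /// the caller has no cached chunks to skip:
    ///
    /// ```ignore
    /// for (row_range, brackets) in snapshot.fetch_bracket_ranges(0..snapshot.len(), None) {
    ///     println!("rows {:?}: {} bracket pairs", row_range, brackets.len());
    /// }
    /// ```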
4432 pub fn fetch_bracket_ranges(
4433 &self,
4434 range: Range<usize>,
4435 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4436 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4437 let mut all_bracket_matches = HashMap::default();
4438
4439 for chunk in self
4440 .tree_sitter_data
4441 .chunks
4442 .applicable_chunks(&[range.to_point(self)])
4443 {
4444 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4445 continue;
4446 }
4447 let chunk_range = chunk.anchor_range();
4448 let chunk_range = chunk_range.to_offset(&self);
4449
4450 if let Some(cached_brackets) =
4451 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4452 {
4453 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4454 continue;
4455 }
4456
4457 let mut all_brackets: Vec<(BracketMatch<usize>, bool)> = Vec::new();
4458 let mut opens = Vec::new();
4459 let mut color_pairs = Vec::new();
4460
4461 let mut matches = self.syntax.matches_with_options(
4462 chunk_range.clone(),
4463 &self.text,
4464 TreeSitterOptions {
4465 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4466 max_start_depth: None,
4467 },
4468 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4469 );
4470 let configs = matches
4471 .grammars()
4472 .iter()
4473 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4474 .collect::<Vec<_>>();
4475
4476 // Group matches by open range so we can either trust grammar output
4477 // or repair it by picking a single closest close per open.
4478 let mut open_to_close_ranges = BTreeMap::new();
4479 while let Some(mat) = matches.peek() {
4480 let mut open = None;
4481 let mut close = None;
4482 let syntax_layer_depth = mat.depth;
4483 let config = configs[mat.grammar_index];
4484 let pattern = &config.patterns[mat.pattern_index];
4485 for capture in mat.captures {
4486 if capture.index == config.open_capture_ix {
4487 open = Some(capture.node.byte_range());
4488 } else if capture.index == config.close_capture_ix {
4489 close = Some(capture.node.byte_range());
4490 }
4491 }
4492
4493 matches.advance();
4494
4495 let Some((open_range, close_range)) = open.zip(close) else {
4496 continue;
4497 };
4498
4499 let bracket_range = open_range.start..=close_range.end;
4500 if !bracket_range.overlaps(&chunk_range) {
4501 continue;
4502 }
4503
4504 open_to_close_ranges
4505 .entry((open_range.start, open_range.end))
4506 .or_insert_with(BTreeMap::new)
4507 .insert(
4508 (close_range.start, close_range.end),
4509 BracketMatch {
4510 open_range: open_range.clone(),
4511 close_range: close_range.clone(),
4512 syntax_layer_depth,
4513 newline_only: pattern.newline_only,
4514 color_index: None,
4515 },
4516 );
4517
4518 all_brackets.push((
4519 BracketMatch {
4520 open_range,
4521 close_range,
4522 syntax_layer_depth,
4523 newline_only: pattern.newline_only,
4524 color_index: None,
4525 },
4526 pattern.rainbow_exclude,
4527 ));
4528 }
4529
4530 let has_bogus_matches = open_to_close_ranges
4531 .iter()
4532 .any(|(_, end_ranges)| end_ranges.len() > 1);
4533 if has_bogus_matches {
4534 // Grammar is producing bogus matches where one open is paired with multiple
4535 // closes. Build a valid stack by walking through positions in order.
4536 // For each close, we know the expected open_len from tree-sitter matches.
4537
4538 // Map each close to its expected open length (for inferring opens)
4539 let close_to_open_len: HashMap<(usize, usize), usize> = all_brackets
4540 .iter()
4541 .map(|(m, _)| ((m.close_range.start, m.close_range.end), m.open_range.len()))
4542 .collect();
4543
4544 // Collect unique opens and closes within this chunk
4545 let mut unique_opens: HashSet<(usize, usize)> = all_brackets
4546 .iter()
4547 .map(|(m, _)| (m.open_range.start, m.open_range.end))
4548 .filter(|(start, _)| chunk_range.contains(start))
4549 .collect();
4550
4551 let mut unique_closes: Vec<(usize, usize)> = all_brackets
4552 .iter()
4553 .map(|(m, _)| (m.close_range.start, m.close_range.end))
4554 .filter(|(start, _)| chunk_range.contains(start))
4555 .collect();
4556 unique_closes.sort();
4557 unique_closes.dedup();
4558
4559 // Build valid pairs by walking through closes in order
4560 let mut unique_opens_vec: Vec<_> = unique_opens.iter().copied().collect();
4561 unique_opens_vec.sort();
4562
4563 let mut valid_pairs: HashSet<((usize, usize), (usize, usize))> = HashSet::default();
4564 let mut open_stack: Vec<(usize, usize)> = Vec::new();
4565 let mut open_idx = 0;
4566
4567 for close in &unique_closes {
4568 // Push all opens before this close onto stack
4569 while open_idx < unique_opens_vec.len()
4570 && unique_opens_vec[open_idx].0 < close.0
4571 {
4572 open_stack.push(unique_opens_vec[open_idx]);
4573 open_idx += 1;
4574 }
4575
4576 // Try to match with most recent open
4577 if let Some(open) = open_stack.pop() {
4578 valid_pairs.insert((open, *close));
4579 } else if let Some(&open_len) = close_to_open_len.get(close) {
4580 // No open on stack - infer one based on expected open_len
4581 if close.0 >= open_len {
4582 let inferred = (close.0 - open_len, close.0);
4583 unique_opens.insert(inferred);
4584 valid_pairs.insert((inferred, *close));
4585 all_brackets.push((
4586 BracketMatch {
4587 open_range: inferred.0..inferred.1,
4588 close_range: close.0..close.1,
4589 newline_only: false,
4590 syntax_layer_depth: 0,
4591 color_index: None,
4592 },
4593 false,
4594 ));
4595 }
4596 }
4597 }
4598
4599 all_brackets.retain(|(m, _)| {
4600 let open = (m.open_range.start, m.open_range.end);
4601 let close = (m.close_range.start, m.close_range.end);
4602 valid_pairs.contains(&(open, close))
4603 });
4604 }
4605
4606 let mut all_brackets = all_brackets
4607 .into_iter()
4608 .enumerate()
4609 .map(|(index, (bracket_match, rainbow_exclude))| {
                // Certain languages have "brackets" that are not really brackets, e.g. tags;
                // such a bracket pair would match the entire tag with all of the text inside.
                // For now, avoid coloring any pair where both brackets are longer than a single
                // character. We still need to colorize `<Element/>` pairs, so this check cannot
                // be made stricter.
4614 let should_color = !rainbow_exclude
4615 && (bracket_match.open_range.len() == 1
4616 || bracket_match.close_range.len() == 1);
4617 if should_color {
4618 opens.push(bracket_match.open_range.clone());
4619 color_pairs.push((
4620 bracket_match.open_range.clone(),
4621 bracket_match.close_range.clone(),
4622 index,
4623 ));
4624 }
4625 bracket_match
4626 })
4627 .collect::<Vec<_>>();
4628
4629 opens.sort_by_key(|r| (r.start, r.end));
4630 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4631 color_pairs.sort_by_key(|(_, close, _)| close.end);
4632
4633 let mut open_stack = Vec::new();
4634 let mut open_index = 0;
4635 for (open, close, index) in color_pairs {
4636 while open_index < opens.len() && opens[open_index].start < close.start {
4637 open_stack.push(opens[open_index].clone());
4638 open_index += 1;
4639 }
4640
4641 if open_stack.last() == Some(&open) {
4642 let depth_index = open_stack.len() - 1;
4643 all_brackets[index].color_index = Some(depth_index);
4644 open_stack.pop();
4645 }
4646 }
4647
4648 all_brackets.sort_by_key(|bracket_match| {
4649 (bracket_match.open_range.start, bracket_match.open_range.end)
4650 });
4651
4652 if let empty_slot @ None =
4653 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4654 {
4655 *empty_slot = Some(all_brackets.clone());
4656 }
4657 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4658 }
4659
4660 all_bracket_matches
4661 }
4662
4663 pub fn all_bracket_ranges(
4664 &self,
4665 range: Range<usize>,
4666 ) -> impl Iterator<Item = BracketMatch<usize>> {
4667 self.fetch_bracket_ranges(range.clone(), None)
4668 .into_values()
4669 .flatten()
4670 .filter(move |bracket_match| {
4671 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4672 bracket_range.overlaps(&range)
4673 })
4674 }
4675
4676 /// Returns bracket range pairs overlapping or adjacent to `range`
4677 pub fn bracket_ranges<T: ToOffset>(
4678 &self,
4679 range: Range<T>,
4680 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4681 // Find bracket pairs that *inclusively* contain the given range.
4682 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4683 self.all_bracket_ranges(range)
4684 .filter(|pair| !pair.newline_only)
4685 }
4686
4687 pub fn debug_variables_query<T: ToOffset>(
4688 &self,
4689 range: Range<T>,
4690 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4691 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4692
4693 let mut matches = self.syntax.matches_with_options(
4694 range.clone(),
4695 &self.text,
4696 TreeSitterOptions::default(),
4697 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4698 );
4699
4700 let configs = matches
4701 .grammars()
4702 .iter()
4703 .map(|grammar| grammar.debug_variables_config.as_ref())
4704 .collect::<Vec<_>>();
4705
4706 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4707
4708 iter::from_fn(move || {
4709 loop {
4710 while let Some(capture) = captures.pop() {
4711 if capture.0.overlaps(&range) {
4712 return Some(capture);
4713 }
4714 }
4715
4716 let mat = matches.peek()?;
4717
4718 let Some(config) = configs[mat.grammar_index].as_ref() else {
4719 matches.advance();
4720 continue;
4721 };
4722
4723 for capture in mat.captures {
4724 let Some(ix) = config
4725 .objects_by_capture_ix
4726 .binary_search_by_key(&capture.index, |e| e.0)
4727 .ok()
4728 else {
4729 continue;
4730 };
4731 let text_object = config.objects_by_capture_ix[ix].1;
4732 let byte_range = capture.node.byte_range();
4733
4734 let mut found = false;
4735 for (range, existing) in captures.iter_mut() {
4736 if existing == &text_object {
4737 range.start = range.start.min(byte_range.start);
4738 range.end = range.end.max(byte_range.end);
4739 found = true;
4740 break;
4741 }
4742 }
4743
4744 if !found {
4745 captures.push((byte_range, text_object));
4746 }
4747 }
4748
4749 matches.advance();
4750 }
4751 })
4752 }
4753
4754 pub fn text_object_ranges<T: ToOffset>(
4755 &self,
4756 range: Range<T>,
4757 options: TreeSitterOptions,
4758 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4759 let range =
4760 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4761
4762 let mut matches =
4763 self.syntax
4764 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4765 grammar.text_object_config.as_ref().map(|c| &c.query)
4766 });
4767
4768 let configs = matches
4769 .grammars()
4770 .iter()
4771 .map(|grammar| grammar.text_object_config.as_ref())
4772 .collect::<Vec<_>>();
4773
4774 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4775
4776 iter::from_fn(move || {
4777 loop {
4778 while let Some(capture) = captures.pop() {
4779 if capture.0.overlaps(&range) {
4780 return Some(capture);
4781 }
4782 }
4783
4784 let mat = matches.peek()?;
4785
4786 let Some(config) = configs[mat.grammar_index].as_ref() else {
4787 matches.advance();
4788 continue;
4789 };
4790
4791 for capture in mat.captures {
4792 let Some(ix) = config
4793 .text_objects_by_capture_ix
4794 .binary_search_by_key(&capture.index, |e| e.0)
4795 .ok()
4796 else {
4797 continue;
4798 };
4799 let text_object = config.text_objects_by_capture_ix[ix].1;
4800 let byte_range = capture.node.byte_range();
4801
4802 let mut found = false;
4803 for (range, existing) in captures.iter_mut() {
4804 if existing == &text_object {
4805 range.start = range.start.min(byte_range.start);
4806 range.end = range.end.max(byte_range.end);
4807 found = true;
4808 break;
4809 }
4810 }
4811
4812 if !found {
4813 captures.push((byte_range, text_object));
4814 }
4815 }
4816
4817 matches.advance();
4818 }
4819 })
4820 }
4821
4822 /// Returns enclosing bracket ranges containing the given range
4823 pub fn enclosing_bracket_ranges<T: ToOffset>(
4824 &self,
4825 range: Range<T>,
4826 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4827 let range = range.start.to_offset(self)..range.end.to_offset(self);
4828
4829 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4830 let max_depth = result
4831 .iter()
4832 .map(|mat| mat.syntax_layer_depth)
4833 .max()
4834 .unwrap_or(0);
4835 result.into_iter().filter(move |pair| {
4836 pair.open_range.start <= range.start
4837 && pair.close_range.end >= range.end
4838 && pair.syntax_layer_depth == max_depth
4839 })
4840 }
4841
    /// Returns the smallest enclosing pair of bracket ranges containing the given range,
    /// or `None` if no bracket pair contains the range.
    ///
    /// A `range_filter` can optionally be passed to restrict which bracket ranges are considered.
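    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot` and a cursor offset
    /// `offset`; the illustrative filter keeps only single-character bracket pairs:
    ///
    /// ```ignore
    /// let filter = |open: Range<usize>, close: Range<usize>| open.len() == 1 && close.len() == 1;
    /// if let Some((open, close)) =
    ///     snapshot.innermost_enclosing_bracket_ranges(offset..offset, Some(&filter))
    /// {
    ///     // `open` and `close` are the byte ranges of the innermost enclosing pair.
    /// }
    /// ```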
4845 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4846 &self,
4847 range: Range<T>,
4848 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4849 ) -> Option<(Range<usize>, Range<usize>)> {
4850 let range = range.start.to_offset(self)..range.end.to_offset(self);
4851
4852 // Get the ranges of the innermost pair of brackets.
4853 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4854
4855 for pair in self.enclosing_bracket_ranges(range) {
4856 if let Some(range_filter) = range_filter
4857 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4858 {
4859 continue;
4860 }
4861
4862 let len = pair.close_range.end - pair.open_range.start;
4863
4864 if let Some((existing_open, existing_close)) = &result {
4865 let existing_len = existing_close.end - existing_open.start;
4866 if len > existing_len {
4867 continue;
4868 }
4869 }
4870
4871 result = Some((pair.open_range, pair.close_range));
4872 }
4873
4874 result
4875 }
4876
    /// Returns offset ranges for any matches of the redaction query.
4878 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4879 /// will be run on the relevant section of the buffer.
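    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot`; the collected offset
    /// ranges are the spans a caller would render obscured:
    ///
    /// ```ignore
    /// let redactions: Vec<_> = snapshot.redacted_ranges(0..snapshot.len()).collect();
    /// ```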
4880 pub fn redacted_ranges<T: ToOffset>(
4881 &self,
4882 range: Range<T>,
4883 ) -> impl Iterator<Item = Range<usize>> + '_ {
4884 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4885 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4886 grammar
4887 .redactions_config
4888 .as_ref()
4889 .map(|config| &config.query)
4890 });
4891
4892 let configs = syntax_matches
4893 .grammars()
4894 .iter()
4895 .map(|grammar| grammar.redactions_config.as_ref())
4896 .collect::<Vec<_>>();
4897
4898 iter::from_fn(move || {
4899 let redacted_range = syntax_matches
4900 .peek()
4901 .and_then(|mat| {
4902 configs[mat.grammar_index].and_then(|config| {
4903 mat.captures
4904 .iter()
4905 .find(|capture| capture.index == config.redaction_capture_ix)
4906 })
4907 })
4908 .map(|mat| mat.node.byte_range());
4909 syntax_matches.advance();
4910 redacted_range
4911 })
4912 }
4913
4914 pub fn injections_intersecting_range<T: ToOffset>(
4915 &self,
4916 range: Range<T>,
4917 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4918 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4919
4920 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4921 grammar
4922 .injection_config
4923 .as_ref()
4924 .map(|config| &config.query)
4925 });
4926
4927 let configs = syntax_matches
4928 .grammars()
4929 .iter()
4930 .map(|grammar| grammar.injection_config.as_ref())
4931 .collect::<Vec<_>>();
4932
4933 iter::from_fn(move || {
4934 let ranges = syntax_matches.peek().and_then(|mat| {
4935 let config = &configs[mat.grammar_index]?;
4936 let content_capture_range = mat.captures.iter().find_map(|capture| {
4937 if capture.index == config.content_capture_ix {
4938 Some(capture.node.byte_range())
4939 } else {
4940 None
4941 }
4942 })?;
4943 let language = self.language_at(content_capture_range.start)?;
4944 Some((content_capture_range, language))
4945 });
4946 syntax_matches.advance();
4947 ranges
4948 })
4949 }
4950
4951 pub fn runnable_ranges(
4952 &self,
4953 offset_range: Range<usize>,
4954 ) -> impl Iterator<Item = RunnableRange> + '_ {
4955 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4956 grammar.runnable_config.as_ref().map(|config| &config.query)
4957 });
4958
4959 let test_configs = syntax_matches
4960 .grammars()
4961 .iter()
4962 .map(|grammar| grammar.runnable_config.as_ref())
4963 .collect::<Vec<_>>();
4964
4965 iter::from_fn(move || {
4966 loop {
4967 let mat = syntax_matches.peek()?;
4968
4969 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4970 let mut run_range = None;
4971 let full_range = mat.captures.iter().fold(
4972 Range {
4973 start: usize::MAX,
4974 end: 0,
4975 },
4976 |mut acc, next| {
4977 let byte_range = next.node.byte_range();
4978 if acc.start > byte_range.start {
4979 acc.start = byte_range.start;
4980 }
4981 if acc.end < byte_range.end {
4982 acc.end = byte_range.end;
4983 }
4984 acc
4985 },
4986 );
4987 if full_range.start > full_range.end {
4988 // We did not find a full spanning range of this match.
4989 return None;
4990 }
4991 let extra_captures: SmallVec<[_; 1]> =
4992 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4993 test_configs
4994 .extra_captures
4995 .get(capture.index as usize)
4996 .cloned()
4997 .and_then(|tag_name| match tag_name {
4998 RunnableCapture::Named(name) => {
4999 Some((capture.node.byte_range(), name))
5000 }
5001 RunnableCapture::Run => {
5002 let _ = run_range.insert(capture.node.byte_range());
5003 None
5004 }
5005 })
5006 }));
5007 let run_range = run_range?;
5008 let tags = test_configs
5009 .query
5010 .property_settings(mat.pattern_index)
5011 .iter()
5012 .filter_map(|property| {
5013 if *property.key == *"tag" {
5014 property
5015 .value
5016 .as_ref()
5017 .map(|value| RunnableTag(value.to_string().into()))
5018 } else {
5019 None
5020 }
5021 })
5022 .collect();
5023 let extra_captures = extra_captures
5024 .into_iter()
5025 .map(|(range, name)| {
5026 (
5027 name.to_string(),
5028 self.text_for_range(range).collect::<String>(),
5029 )
5030 })
5031 .collect();
5032 // All tags should have the same range.
5033 Some(RunnableRange {
5034 run_range,
5035 full_range,
5036 runnable: Runnable {
5037 tags,
5038 language: mat.language,
5039 buffer: self.remote_id(),
5040 },
5041 extra_captures,
5042 buffer_id: self.remote_id(),
5043 })
5044 });
5045
5046 syntax_matches.advance();
5047 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. But we don't
                    // want to return `None` from this iterator just because a match lacked a run
                    // marker, so in that case we loop around to the next match.
5050 return test_range;
5051 }
5052 }
5053 })
5054 }
5055
5056 /// Returns selections for remote peers intersecting the given range.
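    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot`; it counts each remote
    /// collaborator's selections across the whole buffer:
    ///
    /// ```ignore
    /// let whole_buffer = snapshot.anchor_before(0)..snapshot.anchor_after(snapshot.len());
    /// for (replica_id, _line_mode, _cursor_shape, selections) in
    ///     snapshot.selections_in_range(whole_buffer, false)
    /// {
    ///     println!("replica {:?}: {} selections", replica_id, selections.count());
    /// }
    /// ```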
5057 #[allow(clippy::type_complexity)]
5058 pub fn selections_in_range(
5059 &self,
5060 range: Range<Anchor>,
5061 include_local: bool,
5062 ) -> impl Iterator<
5063 Item = (
5064 ReplicaId,
5065 bool,
5066 CursorShape,
5067 impl Iterator<Item = &Selection<Anchor>> + '_,
5068 ),
5069 > + '_ {
5070 self.remote_selections
5071 .iter()
5072 .filter(move |(replica_id, set)| {
5073 (include_local || **replica_id != self.text.replica_id())
5074 && !set.selections.is_empty()
5075 })
5076 .map(move |(replica_id, set)| {
5077 let start_ix = match set.selections.binary_search_by(|probe| {
5078 probe.end.cmp(&range.start, self).then(Ordering::Greater)
5079 }) {
5080 Ok(ix) | Err(ix) => ix,
5081 };
5082 let end_ix = match set.selections.binary_search_by(|probe| {
5083 probe.start.cmp(&range.end, self).then(Ordering::Less)
5084 }) {
5085 Ok(ix) | Err(ix) => ix,
5086 };
5087
5088 (
5089 *replica_id,
5090 set.line_mode,
5091 set.cursor_shape,
5092 set.selections[start_ix..end_ix].iter(),
5093 )
5094 })
5095 }
5096
    /// Returns whether the buffer contains any diagnostics.
5098 pub fn has_diagnostics(&self) -> bool {
5099 !self.diagnostics.is_empty()
5100 }
5101
5102 /// Returns all the diagnostics intersecting the given range.
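    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot`; it resolves the
    /// diagnostic ranges to `Point`s and walks them in forward order:
    ///
    /// ```ignore
    /// for entry in snapshot.diagnostics_in_range::<_, Point>(0..snapshot.len(), false) {
    ///     println!("{:?}: {:?}", entry.range, entry.diagnostic.severity);
    /// }
    /// ```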
5103 pub fn diagnostics_in_range<'a, T, O>(
5104 &'a self,
5105 search_range: Range<T>,
5106 reversed: bool,
5107 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
5108 where
5109 T: 'a + Clone + ToOffset,
5110 O: 'a + FromAnchor,
5111 {
5112 let mut iterators: Vec<_> = self
5113 .diagnostics
5114 .iter()
5115 .map(|(_, collection)| {
5116 collection
5117 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
5118 .peekable()
5119 })
5120 .collect();
5121
5122 std::iter::from_fn(move || {
5123 let (next_ix, _) = iterators
5124 .iter_mut()
5125 .enumerate()
5126 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
5127 .min_by(|(_, a), (_, b)| {
5128 let cmp = a
5129 .range
5130 .start
5131 .cmp(&b.range.start, self)
5132 // when range is equal, sort by diagnostic severity
5133 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
5134 // and stabilize order with group_id
5135 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
5136 if reversed { cmp.reverse() } else { cmp }
5137 })?;
5138 iterators[next_ix]
5139 .next()
5140 .map(
5141 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
5142 diagnostic,
5143 range: FromAnchor::from_anchor(&range.start, self)
5144 ..FromAnchor::from_anchor(&range.end, self),
5145 },
5146 )
5147 })
5148 }
5149
5150 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
5151 /// should be used instead.
5152 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
5153 &self.diagnostics
5154 }
5155
5156 /// Returns all the diagnostic groups associated with the given
5157 /// language server ID. If no language server ID is provided,
5158 /// all diagnostics groups are returned.
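    ///
    /// A sketch (not compiled), assuming a snapshot `snapshot`; passing `None`
    /// collects the groups from every language server:
    ///
    /// ```ignore
    /// for (server_id, group) in snapshot.diagnostic_groups(None) {
    ///     println!("{:?}: {} entries", server_id, group.entries.len());
    /// }
    /// ```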
5159 pub fn diagnostic_groups(
5160 &self,
5161 language_server_id: Option<LanguageServerId>,
5162 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5163 let mut groups = Vec::new();
5164
5165 if let Some(language_server_id) = language_server_id {
5166 if let Ok(ix) = self
5167 .diagnostics
5168 .binary_search_by_key(&language_server_id, |e| e.0)
5169 {
5170 self.diagnostics[ix]
5171 .1
5172 .groups(language_server_id, &mut groups, self);
5173 }
5174 } else {
5175 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5176 diagnostics.groups(*language_server_id, &mut groups, self);
5177 }
5178 }
5179
5180 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5181 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5182 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5183 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5184 });
5185
5186 groups
5187 }
5188
5189 /// Returns an iterator over the diagnostics for the given group.
5190 pub fn diagnostic_group<O>(
5191 &self,
5192 group_id: usize,
5193 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5194 where
5195 O: FromAnchor + 'static,
5196 {
5197 self.diagnostics
5198 .iter()
5199 .flat_map(move |(_, set)| set.group(group_id, self))
5200 }
5201
5202 /// An integer version number that accounts for all updates besides
5203 /// the buffer's text itself (which is versioned via a version vector).
5204 pub fn non_text_state_update_count(&self) -> usize {
5205 self.non_text_state_update_count
5206 }
5207
5208 /// An integer version that changes when the buffer's syntax changes.
5209 pub fn syntax_update_count(&self) -> usize {
5210 self.syntax.update_count()
5211 }
5212
    /// Returns a snapshot of the underlying file.
5214 pub fn file(&self) -> Option<&Arc<dyn File>> {
5215 self.file.as_ref()
5216 }
5217
5218 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5219 if let Some(file) = self.file() {
5220 if file.path().file_name().is_none() || include_root {
5221 Some(file.full_path(cx).to_string_lossy().into_owned())
5222 } else {
5223 Some(file.path().display(file.path_style(cx)).to_string())
5224 }
5225 } else {
5226 None
5227 }
5228 }
5229
5230 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5231 let query_str = query.fuzzy_contents;
5232 if query_str.is_some_and(|query| query.is_empty()) {
5233 return BTreeMap::default();
5234 }
5235
5236 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5237 language,
5238 override_id: None,
5239 }));
5240
5241 let mut query_ix = 0;
5242 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5243 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5244
5245 let mut words = BTreeMap::default();
5246 let mut current_word_start_ix = None;
5247 let mut chunk_ix = query.range.start;
5248 for chunk in self.chunks(query.range, false) {
5249 for (i, c) in chunk.text.char_indices() {
5250 let ix = chunk_ix + i;
5251 if classifier.is_word(c) {
5252 if current_word_start_ix.is_none() {
5253 current_word_start_ix = Some(ix);
5254 }
5255
5256 if let Some(query_chars) = &query_chars
5257 && query_ix < query_len
5258 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5259 {
5260 query_ix += 1;
5261 }
5262 continue;
5263 } else if let Some(word_start) = current_word_start_ix.take()
5264 && query_ix == query_len
5265 {
5266 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5267 let mut word_text = self.text_for_range(word_start..ix).peekable();
5268 let first_char = word_text
5269 .peek()
5270 .and_then(|first_chunk| first_chunk.chars().next());
                // As a heuristic to reduce useless completions, skip empty words and
                // "words" that start with a digit.
5272 if !query.skip_digits
5273 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5274 {
5275 words.insert(word_text.collect(), word_range);
5276 }
5277 }
5278 query_ix = 0;
5279 }
5280 chunk_ix += chunk.text.len();
5281 }
5282
5283 words
5284 }
5285}
5286
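/// Parameters for [`BufferSnapshot::words_in_range`].
///
/// A usage sketch (not compiled), assuming a `BufferSnapshot` named `snapshot`:
///
/// ```ignore
/// let words = snapshot.words_in_range(WordsQuery {
///     fuzzy_contents: Some("cfg"),
///     skip_digits: true,
///     range: 0..snapshot.len(),
/// });
/// ```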
5287pub struct WordsQuery<'a> {
    /// Only returns words that contain all of the characters of this fuzzy string, in order.
5289 pub fuzzy_contents: Option<&'a str>,
5290 /// Skips words that start with a digit.
5291 pub skip_digits: bool,
    /// The buffer offset range in which to look for words.
5293 pub range: Range<usize>,
5294}
5295
5296fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5297 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5298}
5299
5300fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5301 let mut result = IndentSize::spaces(0);
5302 for c in text {
5303 let kind = match c {
5304 ' ' => IndentKind::Space,
5305 '\t' => IndentKind::Tab,
5306 _ => break,
5307 };
5308 if result.len == 0 {
5309 result.kind = kind;
5310 }
5311 result.len += 1;
5312 }
5313 result
5314}
5315
5316impl Clone for BufferSnapshot {
5317 fn clone(&self) -> Self {
5318 Self {
5319 text: self.text.clone(),
5320 syntax: self.syntax.clone(),
5321 file: self.file.clone(),
5322 remote_selections: self.remote_selections.clone(),
5323 diagnostics: self.diagnostics.clone(),
5324 language: self.language.clone(),
5325 tree_sitter_data: self.tree_sitter_data.clone(),
5326 non_text_state_update_count: self.non_text_state_update_count,
5327 capability: self.capability,
5328 }
5329 }
5330}
5331
5332impl Deref for BufferSnapshot {
5333 type Target = text::BufferSnapshot;
5334
5335 fn deref(&self) -> &Self::Target {
5336 &self.text
5337 }
5338}
5339
5340unsafe impl Send for BufferChunks<'_> {}
5341
5342impl<'a> BufferChunks<'a> {
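    /// Creates an iterator over the chunks of `text` within `range`. Syntax
    /// highlighting is applied when `syntax` captures and highlight maps are
    /// provided; diagnostic endpoints are collected from `buffer_snapshot` when
    /// `diagnostics` is true.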
5343 pub(crate) fn new(
5344 text: &'a Rope,
5345 range: Range<usize>,
5346 syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
5347 diagnostics: bool,
5348 buffer_snapshot: Option<&'a BufferSnapshot>,
5349 ) -> Self {
5350 let mut highlights = None;
5351 if let Some((captures, highlight_maps)) = syntax {
5352 highlights = Some(BufferChunkHighlights {
5353 captures,
5354 next_capture: None,
5355 stack: Default::default(),
5356 highlight_maps,
5357 })
5358 }
5359
5360 let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
5361 let chunks = text.chunks_in_range(range.clone());
5362
5363 let mut this = BufferChunks {
5364 range,
5365 buffer_snapshot,
5366 chunks,
5367 diagnostic_endpoints,
5368 error_depth: 0,
5369 warning_depth: 0,
5370 information_depth: 0,
5371 hint_depth: 0,
5372 unnecessary_depth: 0,
5373 underline: true,
5374 highlights,
5375 };
5376 this.initialize_diagnostic_endpoints();
5377 this
5378 }
5379
    /// Seeks this iterator to the given byte range in the buffer, reusing the current
    /// highlight state when the new range is a subrange of the old one.
5381 pub fn seek(&mut self, range: Range<usize>) {
5382 let old_range = std::mem::replace(&mut self.range, range.clone());
5383 self.chunks.set_range(self.range.clone());
5384 if let Some(highlights) = self.highlights.as_mut() {
5385 if old_range.start <= self.range.start && old_range.end >= self.range.end {
5386 // Reuse existing highlights stack, as the new range is a subrange of the old one.
5387 highlights
5388 .stack
5389 .retain(|(end_offset, _)| *end_offset > range.start);
5390 if let Some(capture) = &highlights.next_capture
5391 && range.start >= capture.node.start_byte()
5392 {
5393 let next_capture_end = capture.node.end_byte();
5394 if range.start < next_capture_end {
5395 highlights.stack.push((
5396 next_capture_end,
5397 highlights.highlight_maps[capture.grammar_index].get(capture.index),
5398 ));
5399 }
5400 highlights.next_capture.take();
5401 }
5402 } else if let Some(snapshot) = self.buffer_snapshot {
5403 let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
5404 *highlights = BufferChunkHighlights {
5405 captures,
5406 next_capture: None,
5407 stack: Default::default(),
5408 highlight_maps,
5409 };
5410 } else {
5411 // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
5412 // Seeking such BufferChunks is not supported.
5413 debug_assert!(
5414 false,
5415 "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
5416 );
5417 }
5418
5419 highlights.captures.set_byte_range(self.range.clone());
5420 self.initialize_diagnostic_endpoints();
5421 }
5422 }
5423
5424 fn initialize_diagnostic_endpoints(&mut self) {
5425 if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
5426 && let Some(buffer) = self.buffer_snapshot
5427 {
5428 let mut diagnostic_endpoints = Vec::new();
5429 for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
5430 diagnostic_endpoints.push(DiagnosticEndpoint {
5431 offset: entry.range.start,
5432 is_start: true,
5433 severity: entry.diagnostic.severity,
5434 is_unnecessary: entry.diagnostic.is_unnecessary,
5435 underline: entry.diagnostic.underline,
5436 });
5437 diagnostic_endpoints.push(DiagnosticEndpoint {
5438 offset: entry.range.end,
5439 is_start: false,
5440 severity: entry.diagnostic.severity,
5441 is_unnecessary: entry.diagnostic.is_unnecessary,
5442 underline: entry.diagnostic.underline,
5443 });
5444 }
5445 diagnostic_endpoints
5446 .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
5447 *diagnostics = diagnostic_endpoints.into_iter().peekable();
5448 self.hint_depth = 0;
5449 self.error_depth = 0;
5450 self.warning_depth = 0;
5451 self.information_depth = 0;
5452 }
5453 }
5454
5455 /// The current byte offset in the buffer.
5456 pub fn offset(&self) -> usize {
5457 self.range.start
5458 }
5459
5460 pub fn range(&self) -> Range<usize> {
5461 self.range.clone()
5462 }
5463
5464 fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
5465 let depth = match endpoint.severity {
5466 DiagnosticSeverity::ERROR => &mut self.error_depth,
5467 DiagnosticSeverity::WARNING => &mut self.warning_depth,
5468 DiagnosticSeverity::INFORMATION => &mut self.information_depth,
5469 DiagnosticSeverity::HINT => &mut self.hint_depth,
5470 _ => return,
5471 };
5472 if endpoint.is_start {
5473 *depth += 1;
5474 } else {
5475 *depth -= 1;
5476 }
5477
5478 if endpoint.is_unnecessary {
5479 if endpoint.is_start {
5480 self.unnecessary_depth += 1;
5481 } else {
5482 self.unnecessary_depth -= 1;
5483 }
5484 }
5485 }
5486
5487 fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
5488 if self.error_depth > 0 {
5489 Some(DiagnosticSeverity::ERROR)
5490 } else if self.warning_depth > 0 {
5491 Some(DiagnosticSeverity::WARNING)
5492 } else if self.information_depth > 0 {
5493 Some(DiagnosticSeverity::INFORMATION)
5494 } else if self.hint_depth > 0 {
5495 Some(DiagnosticSeverity::HINT)
5496 } else {
5497 None
5498 }
5499 }
5500
5501 fn current_code_is_unnecessary(&self) -> bool {
5502 self.unnecessary_depth > 0
5503 }
5504}
5505
5506impl<'a> Iterator for BufferChunks<'a> {
5507 type Item = Chunk<'a>;
5508
5509 fn next(&mut self) -> Option<Self::Item> {
5510 let mut next_capture_start = usize::MAX;
5511 let mut next_diagnostic_endpoint = usize::MAX;
5512
5513 if let Some(highlights) = self.highlights.as_mut() {
5514 while let Some((parent_capture_end, _)) = highlights.stack.last() {
5515 if *parent_capture_end <= self.range.start {
5516 highlights.stack.pop();
5517 } else {
5518 break;
5519 }
5520 }
5521
5522 if highlights.next_capture.is_none() {
5523 highlights.next_capture = highlights.captures.next();
5524 }
5525
5526 while let Some(capture) = highlights.next_capture.as_ref() {
5527 if self.range.start < capture.node.start_byte() {
5528 next_capture_start = capture.node.start_byte();
5529 break;
5530 } else {
5531 let highlight_id =
5532 highlights.highlight_maps[capture.grammar_index].get(capture.index);
5533 highlights
5534 .stack
5535 .push((capture.node.end_byte(), highlight_id));
5536 highlights.next_capture = highlights.captures.next();
5537 }
5538 }
5539 }
5540
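        // Temporarily take the endpoint iterator so `update_diagnostic_depths` can
        // borrow `self` mutably while we drain it; it is restored below.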
5541 let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
5542 if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
5543 while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
5544 if endpoint.offset <= self.range.start {
5545 self.update_diagnostic_depths(endpoint);
5546 diagnostic_endpoints.next();
5547 self.underline = endpoint.underline;
5548 } else {
5549 next_diagnostic_endpoint = endpoint.offset;
5550 break;
5551 }
5552 }
5553 }
5554 self.diagnostic_endpoints = diagnostic_endpoints;
5555
5556 if let Some(ChunkBitmaps {
5557 text: chunk,
5558 chars: chars_map,
5559 tabs,
5560 }) = self.chunks.peek_with_bitmaps()
5561 {
5562 let chunk_start = self.range.start;
5563 let mut chunk_end = (self.chunks.offset() + chunk.len())
5564 .min(next_capture_start)
5565 .min(next_diagnostic_endpoint);
5566 let mut highlight_id = None;
5567 if let Some(highlights) = self.highlights.as_ref()
5568 && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
5569 {
5570 chunk_end = chunk_end.min(*parent_capture_end);
5571 highlight_id = Some(*parent_highlight_id);
5572 }
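            // `tabs` and `chars` are bit-per-byte maps for the peeked chunk (tab
            // positions and character boundaries); realign them so bit 0 matches the
            // start of the slice emitted below.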
5573 let bit_start = chunk_start - self.chunks.offset();
5574 let bit_end = chunk_end - self.chunks.offset();
5575
5576 let slice = &chunk[bit_start..bit_end];
5577
5578 let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
5579 let tabs = (tabs >> bit_start) & mask;
5580 let chars = (chars_map >> bit_start) & mask;
5581
5582 self.range.start = chunk_end;
5583 if self.range.start == self.chunks.offset() + chunk.len() {
5584 self.chunks.next().unwrap();
5585 }
5586
5587 Some(Chunk {
5588 text: slice,
5589 syntax_highlight_id: highlight_id,
5590 underline: self.underline,
5591 diagnostic_severity: self.current_diagnostic_severity(),
5592 is_unnecessary: self.current_code_is_unnecessary(),
5593 tabs,
5594 chars,
5595 ..Chunk::default()
5596 })
5597 } else {
5598 None
5599 }
5600 }
5601}
5602
5603impl operation_queue::Operation for Operation {
5604 fn lamport_timestamp(&self) -> clock::Lamport {
5605 match self {
5606 Operation::Buffer(_) => {
5607 unreachable!("buffer operations should never be deferred at this layer")
5608 }
5609 Operation::UpdateDiagnostics {
5610 lamport_timestamp, ..
5611 }
5612 | Operation::UpdateSelections {
5613 lamport_timestamp, ..
5614 }
5615 | Operation::UpdateCompletionTriggers {
5616 lamport_timestamp, ..
5617 }
5618 | Operation::UpdateLineEnding {
5619 lamport_timestamp, ..
5620 } => *lamport_timestamp,
5621 }
5622 }
5623}
5624
5625impl Default for Diagnostic {
5626 fn default() -> Self {
5627 Self {
5628 source: Default::default(),
5629 source_kind: DiagnosticSourceKind::Other,
5630 code: None,
5631 code_description: None,
5632 severity: DiagnosticSeverity::ERROR,
5633 message: Default::default(),
5634 markdown: None,
5635 group_id: 0,
5636 is_primary: false,
5637 is_disk_based: false,
5638 is_unnecessary: false,
5639 underline: true,
5640 data: None,
5641 registration_id: None,
5642 }
5643 }
5644}
5645
5646impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
5648 pub fn spaces(len: u32) -> Self {
5649 Self {
5650 len,
5651 kind: IndentKind::Space,
5652 }
5653 }
5654
5655 /// Returns an [`IndentSize`] representing a tab.
5656 pub fn tab() -> Self {
5657 Self {
5658 len: 1,
5659 kind: IndentKind::Tab,
5660 }
5661 }
5662
5663 /// An iterator over the characters represented by this [`IndentSize`].
5664 pub fn chars(&self) -> impl Iterator<Item = char> {
5665 iter::repeat(self.char()).take(self.len as usize)
5666 }
5667
5668 /// The character representation of this [`IndentSize`].
5669 pub fn char(&self) -> char {
5670 match self.kind {
5671 IndentKind::Space => ' ',
5672 IndentKind::Tab => '\t',
5673 }
5674 }
5675
    /// Consumes this [`IndentSize`] and returns one shrunk (`Ordering::Less`) or
    /// enlarged (`Ordering::Greater`) by the given size; `Ordering::Equal` leaves it
    /// unchanged.
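    ///
    /// A minimal illustrative sketch (hypothetical values, not a doctest):
    ///
    /// ```ignore
    /// // Growing an empty indent adopts the delta's kind and length.
    /// assert_eq!(IndentSize::spaces(0).with_delta(Ordering::Greater, IndentSize::tab()).len, 1);
    /// // Shrinking only applies when the kinds match and there is enough to remove.
    /// assert_eq!(IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab()).len, 4);
    /// ```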
5678 pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
5679 match direction {
5680 Ordering::Less => {
5681 if self.kind == size.kind && self.len >= size.len {
5682 self.len -= size.len;
5683 }
5684 }
5685 Ordering::Equal => {}
5686 Ordering::Greater => {
5687 if self.len == 0 {
5688 self = size;
5689 } else if self.kind == size.kind {
5690 self.len += size.len;
5691 }
5692 }
5693 }
5694 self
5695 }
5696
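    /// Returns the width of this indent in columns, expanding each tab to `tab_size`
    /// columns.
    ///
    /// A minimal illustrative sketch (hypothetical values, not a doctest):
    ///
    /// ```ignore
    /// let tab_size = NonZeroU32::new(4).unwrap();
    /// assert_eq!(IndentSize::tab().len_with_expanded_tabs(tab_size), 4);
    /// assert_eq!(IndentSize::spaces(2).len_with_expanded_tabs(tab_size), 2);
    /// ```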
5697 pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
5698 match self.kind {
5699 IndentKind::Space => self.len as usize,
5700 IndentKind::Tab => self.len as usize * tab_size.get() as usize,
5701 }
5702 }
5703}
5704
5705#[cfg(any(test, feature = "test-support"))]
5706pub struct TestFile {
5707 pub path: Arc<RelPath>,
5708 pub root_name: String,
5709 pub local_root: Option<PathBuf>,
5710}
5711
5712#[cfg(any(test, feature = "test-support"))]
5713impl File for TestFile {
5714 fn path(&self) -> &Arc<RelPath> {
5715 &self.path
5716 }
5717
5718 fn full_path(&self, _: &gpui::App) -> PathBuf {
5719 PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
5720 }
5721
5722 fn as_local(&self) -> Option<&dyn LocalFile> {
5723 if self.local_root.is_some() {
5724 Some(self)
5725 } else {
5726 None
5727 }
5728 }
5729
5730 fn disk_state(&self) -> DiskState {
5731 unimplemented!()
5732 }
5733
5734 fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
5735 self.path().file_name().unwrap_or(self.root_name.as_ref())
5736 }
5737
5738 fn worktree_id(&self, _: &App) -> WorktreeId {
5739 WorktreeId::from_usize(0)
5740 }
5741
5742 fn to_proto(&self, _: &App) -> rpc::proto::File {
5743 unimplemented!()
5744 }
5745
5746 fn is_private(&self) -> bool {
5747 false
5748 }
5749
5750 fn path_style(&self, _cx: &App) -> PathStyle {
5751 PathStyle::local()
5752 }
5753}
5754
5755#[cfg(any(test, feature = "test-support"))]
5756impl LocalFile for TestFile {
5757 fn abs_path(&self, _cx: &App) -> PathBuf {
5758 PathBuf::from(self.local_root.as_ref().unwrap())
5759 .join(&self.root_name)
5760 .join(self.path.as_std_path())
5761 }
5762
5763 fn load(&self, _cx: &App) -> Task<Result<String>> {
5764 unimplemented!()
5765 }
5766
5767 fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
5768 unimplemented!()
5769 }
5770}
5771
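/// Coalesces an iterator of values into ranges of consecutive values, capping each
/// range at `max_len` values.
///
/// A minimal illustrative sketch (not a doctest, since this function is crate-private):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6].into_iter(), 100).collect();
/// assert_eq!(ranges, vec![1..4, 5..7]);
/// // With `max_len == 2`, the first run is split into `1..3` and `3..4`.
/// ```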
5772pub(crate) fn contiguous_ranges(
5773 values: impl Iterator<Item = u32>,
5774 max_len: usize,
5775) -> impl Iterator<Item = Range<u32>> {
5776 let mut values = values;
5777 let mut current_range: Option<Range<u32>> = None;
5778 std::iter::from_fn(move || {
5779 loop {
5780 if let Some(value) = values.next() {
5781 if let Some(range) = &mut current_range
5782 && value == range.end
5783 && range.len() < max_len
5784 {
5785 range.end += 1;
5786 continue;
5787 }
5788
5789 let prev_range = current_range.clone();
5790 current_range = Some(value..(value + 1));
5791 if prev_range.is_some() {
5792 return prev_range;
5793 }
5794 } else {
5795 return current_range.take();
5796 }
5797 }
5798 })
5799}
5800
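/// Classifies characters as word, whitespace, or punctuation characters, optionally
/// honoring extra word characters defined by a [`LanguageScope`].
///
/// A minimal illustrative sketch with no language scope (hypothetical values, not a doctest):
///
/// ```ignore
/// let classifier = CharClassifier::new(None);
/// assert_eq!(classifier.kind('a'), CharKind::Word);
/// assert_eq!(classifier.kind('-'), CharKind::Punctuation);
/// // With `ignore_punctuation`, punctuation is folded into words.
/// assert_eq!(classifier.ignore_punctuation(true).kind('-'), CharKind::Word);
/// ```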
5801#[derive(Default, Debug)]
5802pub struct CharClassifier {
5803 scope: Option<LanguageScope>,
5804 scope_context: Option<CharScopeContext>,
5805 ignore_punctuation: bool,
5806}
5807
5808impl CharClassifier {
5809 pub fn new(scope: Option<LanguageScope>) -> Self {
5810 Self {
5811 scope,
5812 scope_context: None,
5813 ignore_punctuation: false,
5814 }
5815 }
5816
5817 pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
5818 Self {
5819 scope_context,
5820 ..self
5821 }
5822 }
5823
5824 pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
5825 Self {
5826 ignore_punctuation,
5827 ..self
5828 }
5829 }
5830
5831 pub fn is_whitespace(&self, c: char) -> bool {
5832 self.kind(c) == CharKind::Whitespace
5833 }
5834
5835 pub fn is_word(&self, c: char) -> bool {
5836 self.kind(c) == CharKind::Word
5837 }
5838
5839 pub fn is_punctuation(&self, c: char) -> bool {
5840 self.kind(c) == CharKind::Punctuation
5841 }
5842
5843 pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
5844 if c.is_alphanumeric() || c == '_' {
5845 return CharKind::Word;
5846 }
5847
5848 if let Some(scope) = &self.scope {
5849 let characters = match self.scope_context {
5850 Some(CharScopeContext::Completion) => scope.completion_query_characters(),
5851 Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
5852 None => scope.word_characters(),
5853 };
5854 if let Some(characters) = characters
5855 && characters.contains(&c)
5856 {
5857 return CharKind::Word;
5858 }
5859 }
5860
5861 if c.is_whitespace() {
5862 return CharKind::Whitespace;
5863 }
5864
5865 if ignore_punctuation {
5866 CharKind::Word
5867 } else {
5868 CharKind::Punctuation
5869 }
5870 }
5871
5872 pub fn kind(&self, c: char) -> CharKind {
5873 self.kind_with(c, self.ignore_punctuation)
5874 }
5875}
5876
5877/// Find all of the ranges of whitespace that occur at the ends of lines
5878/// in the given rope.
5879///
5880/// This could also be done with a regex search, but this implementation
5881/// avoids copying text.
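///
/// A minimal illustrative sketch (hypothetical input, not a doctest):
///
/// ```ignore
/// let rope = Rope::from("let x = 1;  \n\tfoo\t\n}\n");
/// // One range per line that ends in spaces or tabs.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![10..12, 17..18]);
/// ```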
5882pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
5883 let mut ranges = Vec::new();
5884
5885 let mut offset = 0;
5886 let mut prev_chunk_trailing_whitespace_range = 0..0;
5887 for chunk in rope.chunks() {
5888 let mut prev_line_trailing_whitespace_range = 0..0;
5889 for (i, line) in chunk.split('\n').enumerate() {
5890 let line_end_offset = offset + line.len();
5891 let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
5892 let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;
5893
5894 if i == 0 && trimmed_line_len == 0 {
5895 trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
5896 }
5897 if !prev_line_trailing_whitespace_range.is_empty() {
5898 ranges.push(prev_line_trailing_whitespace_range);
5899 }
5900
5901 offset = line_end_offset + 1;
5902 prev_line_trailing_whitespace_range = trailing_whitespace_range;
5903 }
5904
5905 offset -= 1;
5906 prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
5907 }
5908
5909 if !prev_chunk_trailing_whitespace_range.is_empty() {
5910 ranges.push(prev_chunk_trailing_whitespace_range);
5911 }
5912
5913 ranges
5914}