1pub mod row_chunk;
2
3use crate::{
4 DebuggerTextObject, LanguageScope, Outline, OutlineConfig, PLAIN_TEXT, RunnableCapture,
5 RunnableTag, TextObject, TreeSitterOptions,
6 diagnostic_set::{DiagnosticEntry, DiagnosticEntryRef, DiagnosticGroup},
7 language_settings::{LanguageSettings, language_settings},
8 outline::OutlineItem,
9 row_chunk::RowChunks,
10 syntax_map::{
11 MAX_BYTES_TO_QUERY, SyntaxLayer, SyntaxMap, SyntaxMapCapture, SyntaxMapCaptures,
12 SyntaxMapMatch, SyntaxMapMatches, SyntaxSnapshot, ToTreeSitterPoint,
13 },
14 task_context::RunnableRange,
15 text_diff::text_diff,
16 unified_diff_with_offsets,
17};
18pub use crate::{
19 Grammar, Language, LanguageRegistry,
20 diagnostic_set::DiagnosticSet,
21 highlight_map::{HighlightId, HighlightMap},
22 proto,
23};
24use anyhow::{Context as _, Result};
25use clock::Lamport;
26pub use clock::ReplicaId;
27use collections::{HashMap, HashSet};
28use encoding_rs::Encoding;
29use fs::MTime;
30use futures::channel::oneshot;
31use gpui::{
32 App, AppContext as _, Context, Entity, EventEmitter, HighlightStyle, SharedString, StyledText,
33 Task, TextStyle,
34};
35
36use lsp::{LanguageServerId, NumberOrString};
37use parking_lot::Mutex;
38use serde::{Deserialize, Serialize};
39use serde_json::Value;
40use settings::WorktreeId;
41use smallvec::SmallVec;
42use smol::future::yield_now;
43use std::{
44 any::Any,
45 borrow::Cow,
46 cell::Cell,
47 cmp::{self, Ordering, Reverse},
48 collections::{BTreeMap, BTreeSet},
49 future::Future,
50 iter::{self, Iterator, Peekable},
51 mem,
52 num::NonZeroU32,
53 ops::{Deref, Range},
54 path::PathBuf,
55 rc,
56 sync::Arc,
57 time::{Duration, Instant},
58 vec,
59};
60use sum_tree::TreeMap;
61use text::operation_queue::OperationQueue;
62use text::*;
63pub use text::{
64 Anchor, Bias, Buffer as TextBuffer, BufferId, BufferSnapshot as TextBufferSnapshot, Edit,
65 LineIndent, OffsetRangeExt, OffsetUtf16, Patch, Point, PointUtf16, Rope, Selection,
66 SelectionGoal, Subscription, TextDimension, TextSummary, ToOffset, ToOffsetUtf16, ToPoint,
67 ToPointUtf16, Transaction, TransactionId, Unclipped,
68};
69use theme::{ActiveTheme as _, SyntaxTheme};
70#[cfg(any(test, feature = "test-support"))]
71use util::RandomCharIter;
72use util::{RangeExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
73
74#[cfg(any(test, feature = "test-support"))]
75pub use {tree_sitter_python, tree_sitter_rust, tree_sitter_typescript};
76
77pub use lsp::DiagnosticSeverity;
78
/// Indicates whether a [`Buffer`] may be edited.
80#[derive(PartialEq, Clone, Copy, Debug)]
81pub enum Capability {
82 /// The buffer is a mutable replica.
83 ReadWrite,
    /// The buffer is a mutable replica, but has been toggled to read-only.
85 Read,
86 /// The buffer is a read-only replica.
87 ReadOnly,
88}
89
90impl Capability {
91 /// Returns `true` if the capability is `ReadWrite`.
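    ///
    /// A minimal sketch of the expected behavior (illustrative, not compiled as
    /// a doctest; the path assumes `Capability` is re-exported at the crate root):
    ///
    /// ```ignore
    /// use language::Capability;
    ///
    /// assert!(Capability::ReadWrite.editable());
    /// assert!(!Capability::Read.editable());
    /// assert!(!Capability::ReadOnly.editable());
    /// ```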
92 pub fn editable(self) -> bool {
93 matches!(self, Capability::ReadWrite)
94 }
95}
96
97pub type BufferRow = u32;
98
99/// An in-memory representation of a source code file, including its text,
100/// syntax trees, git status, and diagnostics.
101pub struct Buffer {
102 text: TextBuffer,
103 branch_state: Option<BufferBranchState>,
104 /// Filesystem state, `None` when there is no path.
105 file: Option<Arc<dyn File>>,
106 /// The mtime of the file when this buffer was last loaded from
107 /// or saved to disk.
108 saved_mtime: Option<MTime>,
109 /// The version vector when this buffer was last loaded from
110 /// or saved to disk.
111 saved_version: clock::Global,
112 preview_version: clock::Global,
113 transaction_depth: usize,
114 was_dirty_before_starting_transaction: Option<bool>,
115 reload_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 autoindent_requests: Vec<Arc<AutoindentRequest>>,
118 wait_for_autoindent_txs: Vec<oneshot::Sender<()>>,
119 pending_autoindent: Option<Task<()>>,
120 sync_parse_timeout: Option<Duration>,
121 syntax_map: Mutex<SyntaxMap>,
122 reparse: Option<Task<()>>,
123 parse_status: (watch::Sender<ParseStatus>, watch::Receiver<ParseStatus>),
124 non_text_state_update_count: usize,
125 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
126 remote_selections: TreeMap<ReplicaId, SelectionSet>,
127 diagnostics_timestamp: clock::Lamport,
128 completion_triggers: BTreeSet<String>,
129 completion_triggers_per_language_server: HashMap<LanguageServerId, BTreeSet<String>>,
130 completion_triggers_timestamp: clock::Lamport,
131 deferred_ops: OperationQueue<Operation>,
132 capability: Capability,
133 has_conflict: bool,
    /// Memoizes calls to `has_changes_since(saved_version)`.
    /// The cell contains `(self.version, has_changes)` as of the most recent call.
136 has_unsaved_edits: Cell<(clock::Global, bool)>,
137 change_bits: Vec<rc::Weak<Cell<bool>>>,
138 _subscriptions: Vec<gpui::Subscription>,
139 tree_sitter_data: Arc<TreeSitterData>,
140 encoding: &'static Encoding,
141 has_bom: bool,
142}
143
144#[derive(Debug)]
145pub struct TreeSitterData {
146 chunks: RowChunks,
147 brackets_by_chunks: Mutex<Vec<Option<Vec<BracketMatch<usize>>>>>,
148}
149
150const MAX_ROWS_IN_A_CHUNK: u32 = 50;
151
152impl TreeSitterData {
153 fn clear(&mut self, snapshot: text::BufferSnapshot) {
154 self.chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
155 self.brackets_by_chunks.get_mut().clear();
156 self.brackets_by_chunks
157 .get_mut()
158 .resize(self.chunks.len(), None);
159 }
160
161 fn new(snapshot: text::BufferSnapshot) -> Self {
162 let chunks = RowChunks::new(snapshot, MAX_ROWS_IN_A_CHUNK);
163 Self {
164 brackets_by_chunks: Mutex::new(vec![None; chunks.len()]),
165 chunks,
166 }
167 }
168
169 fn version(&self) -> &clock::Global {
170 self.chunks.version()
171 }
172}
173
174#[derive(Copy, Clone, Debug, PartialEq, Eq)]
175pub enum ParseStatus {
176 Idle,
177 Parsing,
178}
179
180struct BufferBranchState {
181 base_buffer: Entity<Buffer>,
182 merged_operations: Vec<Lamport>,
183}
184
185/// An immutable, cheaply cloneable representation of a fixed
186/// state of a buffer.
187pub struct BufferSnapshot {
188 pub text: text::BufferSnapshot,
189 pub syntax: SyntaxSnapshot,
190 file: Option<Arc<dyn File>>,
191 diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
192 remote_selections: TreeMap<ReplicaId, SelectionSet>,
193 language: Option<Arc<Language>>,
194 non_text_state_update_count: usize,
195 tree_sitter_data: Arc<TreeSitterData>,
196 pub capability: Capability,
197}
198
199/// The kind and amount of indentation in a particular line. For now,
200/// assumes that indentation is all the same character.
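///
/// A sketch of the representation (illustrative, not compiled as a doctest):
///
/// ```ignore
/// // A line beginning with four spaces:
/// let spaces = IndentSize { len: 4, kind: IndentKind::Space };
/// // A line beginning with a single tab:
/// let tab = IndentSize { len: 1, kind: IndentKind::Tab };
/// assert_ne!(spaces, tab);
/// ```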
201#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
202pub struct IndentSize {
203 /// The number of bytes that comprise the indentation.
204 pub len: u32,
205 /// The kind of whitespace used for indentation.
206 pub kind: IndentKind,
207}
208
209/// A whitespace character that's used for indentation.
210#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
211pub enum IndentKind {
212 /// An ASCII space character.
213 #[default]
214 Space,
215 /// An ASCII tab character.
216 Tab,
217}
218
219/// The shape of a selection cursor.
220#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
221pub enum CursorShape {
222 /// A vertical bar
223 #[default]
224 Bar,
225 /// A block that surrounds the following character
226 Block,
227 /// An underline that runs along the following character
228 Underline,
229 /// A box drawn around the following character
230 Hollow,
231}
232
233impl From<settings::CursorShape> for CursorShape {
234 fn from(shape: settings::CursorShape) -> Self {
235 match shape {
236 settings::CursorShape::Bar => CursorShape::Bar,
237 settings::CursorShape::Block => CursorShape::Block,
238 settings::CursorShape::Underline => CursorShape::Underline,
239 settings::CursorShape::Hollow => CursorShape::Hollow,
240 }
241 }
242}
243
244#[derive(Clone, Debug)]
245struct SelectionSet {
246 line_mode: bool,
247 cursor_shape: CursorShape,
248 selections: Arc<[Selection<Anchor>]>,
249 lamport_timestamp: clock::Lamport,
250}
251
252/// A diagnostic associated with a certain range of a buffer.
253#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
254pub struct Diagnostic {
255 /// The name of the service that produced this diagnostic.
256 pub source: Option<String>,
257 /// The ID provided by the dynamic registration that produced this diagnostic.
258 pub registration_id: Option<SharedString>,
259 /// A machine-readable code that identifies this diagnostic.
260 pub code: Option<NumberOrString>,
261 pub code_description: Option<lsp::Uri>,
262 /// Whether this diagnostic is a hint, warning, or error.
263 pub severity: DiagnosticSeverity,
264 /// The human-readable message associated with this diagnostic.
265 pub message: String,
    /// The human-readable message in Markdown format, if available.
267 pub markdown: Option<String>,
268 /// An id that identifies the group to which this diagnostic belongs.
269 ///
270 /// When a language server produces a diagnostic with
271 /// one or more associated diagnostics, those diagnostics are all
272 /// assigned a single group ID.
273 pub group_id: usize,
274 /// Whether this diagnostic is the primary diagnostic for its group.
275 ///
276 /// In a given group, the primary diagnostic is the top-level diagnostic
277 /// returned by the language server. The non-primary diagnostics are the
278 /// associated diagnostics.
279 pub is_primary: bool,
280 /// Whether this diagnostic is considered to originate from an analysis of
281 /// files on disk, as opposed to any unsaved buffer contents. This is a
282 /// property of a given diagnostic source, and is configured for a given
283 /// language server via the [`LspAdapter::disk_based_diagnostic_sources`](crate::LspAdapter::disk_based_diagnostic_sources) method
284 /// for the language server.
285 pub is_disk_based: bool,
286 /// Whether this diagnostic marks unnecessary code.
287 pub is_unnecessary: bool,
    /// Allows diagnostic groups to be quickly distinguished by the kind of source that produced them.
289 pub source_kind: DiagnosticSourceKind,
290 /// Data from language server that produced this diagnostic. Passed back to the LS when we request code actions for this diagnostic.
291 pub data: Option<Value>,
292 /// Whether to underline the corresponding text range in the editor.
293 pub underline: bool,
294}
295
296#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
297pub enum DiagnosticSourceKind {
298 Pulled,
299 Pushed,
300 Other,
301}
302
303/// An operation used to synchronize this buffer with its other replicas.
304#[derive(Clone, Debug, PartialEq)]
305pub enum Operation {
306 /// A text operation.
307 Buffer(text::Operation),
308
309 /// An update to the buffer's diagnostics.
310 UpdateDiagnostics {
311 /// The id of the language server that produced the new diagnostics.
312 server_id: LanguageServerId,
313 /// The diagnostics.
314 diagnostics: Arc<[DiagnosticEntry<Anchor>]>,
315 /// The buffer's lamport timestamp.
316 lamport_timestamp: clock::Lamport,
317 },
318
319 /// An update to the most recent selections in this buffer.
320 UpdateSelections {
321 /// The selections.
322 selections: Arc<[Selection<Anchor>]>,
323 /// The buffer's lamport timestamp.
324 lamport_timestamp: clock::Lamport,
325 /// Whether the selections are in 'line mode'.
326 line_mode: bool,
327 /// The [`CursorShape`] associated with these selections.
328 cursor_shape: CursorShape,
329 },
330
331 /// An update to the characters that should trigger autocompletion
332 /// for this buffer.
333 UpdateCompletionTriggers {
334 /// The characters that trigger autocompletion.
335 triggers: Vec<String>,
336 /// The buffer's lamport timestamp.
337 lamport_timestamp: clock::Lamport,
338 /// The language server ID.
339 server_id: LanguageServerId,
340 },
341
342 /// An update to the line ending type of this buffer.
343 UpdateLineEnding {
344 /// The line ending type.
345 line_ending: LineEnding,
346 /// The buffer's lamport timestamp.
347 lamport_timestamp: clock::Lamport,
348 },
349}
350
351/// An event that occurs in a buffer.
352#[derive(Clone, Debug, PartialEq)]
353pub enum BufferEvent {
354 /// The buffer was changed in a way that must be
355 /// propagated to its other replicas.
356 Operation {
357 operation: Operation,
358 is_local: bool,
359 },
360 /// The buffer was edited.
361 Edited,
362 /// The buffer's `dirty` bit changed.
363 DirtyChanged,
364 /// The buffer was saved.
365 Saved,
366 /// The buffer's file was changed on disk.
367 FileHandleChanged,
368 /// The buffer was reloaded.
369 Reloaded,
    /// The buffer needs to be reloaded.
371 ReloadNeeded,
372 /// The buffer's language was changed.
373 /// The boolean indicates whether this buffer did not have a language before, but does now.
374 LanguageChanged(bool),
375 /// The buffer's syntax trees were updated.
376 Reparsed,
377 /// The buffer's diagnostics were updated.
378 DiagnosticsUpdated,
379 /// The buffer gained or lost editing capabilities.
380 CapabilityChanged,
381}
382
383/// The file associated with a buffer.
384pub trait File: Send + Sync + Any {
385 /// Returns the [`LocalFile`] associated with this file, if the
386 /// file is local.
387 fn as_local(&self) -> Option<&dyn LocalFile>;
388
389 /// Returns whether this file is local.
390 fn is_local(&self) -> bool {
391 self.as_local().is_some()
392 }
393
394 /// Returns whether the file is new, exists in storage, or has been deleted. Includes metadata
395 /// only available in some states, such as modification time.
396 fn disk_state(&self) -> DiskState;
397
398 /// Returns the path of this file relative to the worktree's root directory.
399 fn path(&self) -> &Arc<RelPath>;
400
401 /// Returns the path of this file relative to the worktree's parent directory (this means it
402 /// includes the name of the worktree's root folder).
403 fn full_path(&self, cx: &App) -> PathBuf;
404
405 /// Returns the path style of this file.
406 fn path_style(&self, cx: &App) -> PathStyle;
407
408 /// Returns the last component of this handle's absolute path. If this handle refers to the root
409 /// of its worktree, then this method will return the name of the worktree itself.
410 fn file_name<'a>(&'a self, cx: &'a App) -> &'a str;
411
412 /// Returns the id of the worktree to which this file belongs.
413 ///
414 /// This is needed for looking up project-specific settings.
415 fn worktree_id(&self, cx: &App) -> WorktreeId;
416
417 /// Converts this file into a protobuf message.
418 fn to_proto(&self, cx: &App) -> rpc::proto::File;
419
420 /// Return whether Zed considers this to be a private file.
421 fn is_private(&self) -> bool;
422
423 fn can_open(&self) -> bool {
424 !self.is_local()
425 }
426}
427
428/// The file's storage status - whether it's stored (`Present`), and if so when it was last
429/// modified. In the case where the file is not stored, it can be either `New` or `Deleted`. In the
430/// UI these two states are distinguished. For example, the buffer tab does not display a deletion
431/// indicator for new files.
432#[derive(Copy, Clone, Debug, PartialEq)]
433pub enum DiskState {
434 /// File created in Zed that has not been saved.
435 New,
436 /// File present on the filesystem.
437 Present { mtime: MTime },
438 /// Deleted file that was previously present.
439 Deleted,
    /// An old version of a file that was previously present,
    /// usually from a version control system (e.g. a git blob).
442 Historic { was_deleted: bool },
443}
444
445impl DiskState {
446 /// Returns the file's last known modification time on disk.
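    ///
    /// Expected behavior per variant (illustrative, not compiled as a doctest;
    /// assumes an `mtime: MTime` value is in scope):
    ///
    /// ```ignore
    /// assert_eq!(DiskState::Present { mtime }.mtime(), Some(mtime));
    /// assert_eq!(DiskState::New.mtime(), None);
    /// assert_eq!(DiskState::Deleted.mtime(), None);
    /// ```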
447 pub fn mtime(self) -> Option<MTime> {
448 match self {
449 DiskState::New => None,
450 DiskState::Present { mtime } => Some(mtime),
451 DiskState::Deleted => None,
452 DiskState::Historic { .. } => None,
453 }
454 }
455
456 pub fn exists(&self) -> bool {
457 match self {
458 DiskState::New => false,
459 DiskState::Present { .. } => true,
460 DiskState::Deleted => false,
461 DiskState::Historic { .. } => false,
462 }
463 }
464
465 /// Returns true if this state represents a deleted file.
466 pub fn is_deleted(&self) -> bool {
467 match self {
468 DiskState::Deleted => true,
469 DiskState::Historic { was_deleted } => *was_deleted,
470 _ => false,
471 }
472 }
473}
474
475/// The file associated with a buffer, in the case where the file is on the local disk.
476pub trait LocalFile: File {
    /// Returns the absolute path of this file.
478 fn abs_path(&self, cx: &App) -> PathBuf;
479
480 /// Loads the file contents from disk and returns them as a UTF-8 encoded string.
481 fn load(&self, cx: &App) -> Task<Result<String>>;
482
483 /// Loads the file's contents from disk.
484 fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
485}
486
487/// The auto-indent behavior associated with an editing operation.
488/// For some editing operations, each affected line of text has its
489/// indentation recomputed. For other operations, the entire block
490/// of edited text is adjusted uniformly.
491#[derive(Clone, Debug)]
492pub enum AutoindentMode {
493 /// Indent each line of inserted text.
494 EachLine,
495 /// Apply the same indentation adjustment to all of the lines
496 /// in a given insertion.
497 Block {
498 /// The original indentation column of the first line of each
499 /// insertion, if it has been copied.
500 ///
501 /// Knowing this makes it possible to preserve the relative indentation
502 /// of every line in the insertion from when it was copied.
503 ///
        /// If the original indent column is `a`, and the first line of the
        /// insertion is auto-indented to column `b`, then every other line of
        /// the insertion is shifted by `b - a` columns, preserving its
        /// indentation relative to the first line.
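        ///
        /// A worked example with illustrative numbers: if the copied block's
        /// first line sat at column 4 and auto-indent places that first line at
        /// column 8, the adjustment is `8 - 4 = 4` columns, so a line that was
        /// originally at column 8 ends up at column 12.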
507 original_indent_columns: Vec<Option<u32>>,
508 },
509}
510
511#[derive(Clone)]
512struct AutoindentRequest {
513 before_edit: BufferSnapshot,
514 entries: Vec<AutoindentRequestEntry>,
515 is_block_mode: bool,
516 ignore_empty_lines: bool,
517}
518
519#[derive(Debug, Clone)]
520struct AutoindentRequestEntry {
521 /// A range of the buffer whose indentation should be adjusted.
522 range: Range<Anchor>,
523 /// The row of the edit start in the buffer before the edit was applied.
524 /// This is stored here because the anchor in range is created after
525 /// the edit, so it cannot be used with the before_edit snapshot.
526 old_row: Option<u32>,
527 indent_size: IndentSize,
528 original_indent_column: Option<u32>,
529}
530
531#[derive(Debug)]
532struct IndentSuggestion {
533 basis_row: u32,
534 delta: Ordering,
535 within_error: bool,
536}
537
538struct BufferChunkHighlights<'a> {
539 captures: SyntaxMapCaptures<'a>,
540 next_capture: Option<SyntaxMapCapture<'a>>,
541 stack: Vec<(usize, HighlightId)>,
542 highlight_maps: Vec<HighlightMap>,
543}
544
545/// An iterator that yields chunks of a buffer's text, along with their
546/// syntax highlights and diagnostic status.
547pub struct BufferChunks<'a> {
548 buffer_snapshot: Option<&'a BufferSnapshot>,
549 range: Range<usize>,
550 chunks: text::Chunks<'a>,
551 diagnostic_endpoints: Option<Peekable<vec::IntoIter<DiagnosticEndpoint>>>,
552 error_depth: usize,
553 warning_depth: usize,
554 information_depth: usize,
555 hint_depth: usize,
556 unnecessary_depth: usize,
557 underline: bool,
558 highlights: Option<BufferChunkHighlights<'a>>,
559}
560
561/// A chunk of a buffer's text, along with its syntax highlight and
562/// diagnostic status.
563#[derive(Clone, Debug, Default)]
564pub struct Chunk<'a> {
565 /// The text of the chunk.
566 pub text: &'a str,
567 /// The syntax highlighting style of the chunk.
568 pub syntax_highlight_id: Option<HighlightId>,
569 /// The highlight style that has been applied to this chunk in
570 /// the editor.
571 pub highlight_style: Option<HighlightStyle>,
572 /// The severity of diagnostic associated with this chunk, if any.
573 pub diagnostic_severity: Option<DiagnosticSeverity>,
574 /// A bitset of which characters are tabs in this string.
575 pub tabs: u128,
    /// A bitmap of character indices in this chunk.
577 pub chars: u128,
578 /// Whether this chunk of text is marked as unnecessary.
579 pub is_unnecessary: bool,
580 /// Whether this chunk of text was originally a tab character.
581 pub is_tab: bool,
582 /// Whether this chunk of text was originally an inlay.
583 pub is_inlay: bool,
584 /// Whether to underline the corresponding text range in the editor.
585 pub underline: bool,
586}
587
588/// A set of edits to a given version of a buffer, computed asynchronously.
589#[derive(Debug)]
590pub struct Diff {
591 pub base_version: clock::Global,
592 pub line_ending: LineEnding,
593 pub edits: Vec<(Range<usize>, Arc<str>)>,
594}
595
596#[derive(Debug, Clone, Copy)]
597pub(crate) struct DiagnosticEndpoint {
598 offset: usize,
599 is_start: bool,
600 underline: bool,
601 severity: DiagnosticSeverity,
602 is_unnecessary: bool,
603}
604
605/// A class of characters, used for characterizing a run of text.
606#[derive(Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Debug)]
607pub enum CharKind {
608 /// Whitespace.
609 Whitespace,
610 /// Punctuation.
611 Punctuation,
612 /// Word.
613 Word,
614}
615
616/// Context for character classification within a specific scope.
617#[derive(Copy, Clone, Eq, PartialEq, Debug)]
618pub enum CharScopeContext {
619 /// Character classification for completion queries.
620 ///
621 /// This context treats certain characters as word constituents that would
622 /// normally be considered punctuation, such as '-' in Tailwind classes
623 /// ("bg-yellow-100") or '.' in import paths ("foo.ts").
624 Completion,
625 /// Character classification for linked edits.
626 ///
627 /// This context handles characters that should be treated as part of
628 /// identifiers during linked editing operations, such as '.' in JSX
629 /// component names like `<Animated.View>`.
630 LinkedEdit,
631}
632
/// A runnable is a set of data about a region of a buffer that can be resolved into a task.
634pub struct Runnable {
635 pub tags: SmallVec<[RunnableTag; 1]>,
636 pub language: Arc<Language>,
637 pub buffer: BufferId,
638}
639
640#[derive(Default, Clone, Debug)]
641pub struct HighlightedText {
642 pub text: SharedString,
643 pub highlights: Vec<(Range<usize>, HighlightStyle)>,
644}
645
646#[derive(Default, Debug)]
647struct HighlightedTextBuilder {
648 pub text: String,
649 highlights: Vec<(Range<usize>, HighlightStyle)>,
650}
651
652impl HighlightedText {
653 pub fn from_buffer_range<T: ToOffset>(
654 range: Range<T>,
655 snapshot: &text::BufferSnapshot,
656 syntax_snapshot: &SyntaxSnapshot,
657 override_style: Option<HighlightStyle>,
658 syntax_theme: &SyntaxTheme,
659 ) -> Self {
660 let mut highlighted_text = HighlightedTextBuilder::default();
661 highlighted_text.add_text_from_buffer_range(
662 range,
663 snapshot,
664 syntax_snapshot,
665 override_style,
666 syntax_theme,
667 );
668 highlighted_text.build()
669 }
670
671 pub fn to_styled_text(&self, default_style: &TextStyle) -> StyledText {
672 gpui::StyledText::new(self.text.clone())
673 .with_default_highlights(default_style, self.highlights.iter().cloned())
674 }
675
    /// Returns the first line, with leading whitespace trimmed (but never past
    /// the start of the first highlight), along with a boolean indicating
    /// whether more lines follow.
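    ///
    /// A sketch of the behavior (illustrative, not compiled as a doctest;
    /// `style` stands in for any `HighlightStyle` value):
    ///
    /// ```ignore
    /// let text = HighlightedText {
    ///     text: "    let x = 1;\nlet y = 2;".into(),
    ///     highlights: vec![(8..9, style)],
    /// };
    /// let (preview, has_more) = text.first_line_preview();
    /// assert_eq!(preview.text.as_ref(), "let x = 1;");
    /// assert!(has_more);
    /// ```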
678 pub fn first_line_preview(self) -> (Self, bool) {
679 let newline_ix = self.text.find('\n').unwrap_or(self.text.len());
680 let first_line = &self.text[..newline_ix];
681
682 // Trim leading whitespace, unless an edit starts prior to it.
683 let mut preview_start_ix = first_line.len() - first_line.trim_start().len();
684 if let Some((first_highlight_range, _)) = self.highlights.first() {
685 preview_start_ix = preview_start_ix.min(first_highlight_range.start);
686 }
687
688 let preview_text = &first_line[preview_start_ix..];
689 let preview_highlights = self
690 .highlights
691 .into_iter()
692 .skip_while(|(range, _)| range.end <= preview_start_ix)
693 .take_while(|(range, _)| range.start < newline_ix)
694 .filter_map(|(mut range, highlight)| {
695 range.start = range.start.saturating_sub(preview_start_ix);
696 range.end = range.end.min(newline_ix).saturating_sub(preview_start_ix);
697 if range.is_empty() {
698 None
699 } else {
700 Some((range, highlight))
701 }
702 });
703
704 let preview = Self {
705 text: SharedString::new(preview_text),
706 highlights: preview_highlights.collect(),
707 };
708
709 (preview, self.text.len() > newline_ix)
710 }
711}
712
713impl HighlightedTextBuilder {
714 pub fn build(self) -> HighlightedText {
715 HighlightedText {
716 text: self.text.into(),
717 highlights: self.highlights,
718 }
719 }
720
721 pub fn add_text_from_buffer_range<T: ToOffset>(
722 &mut self,
723 range: Range<T>,
724 snapshot: &text::BufferSnapshot,
725 syntax_snapshot: &SyntaxSnapshot,
726 override_style: Option<HighlightStyle>,
727 syntax_theme: &SyntaxTheme,
728 ) {
729 let range = range.to_offset(snapshot);
730 for chunk in Self::highlighted_chunks(range, snapshot, syntax_snapshot) {
731 let start = self.text.len();
732 self.text.push_str(chunk.text);
733 let end = self.text.len();
734
735 if let Some(highlight_style) = chunk
736 .syntax_highlight_id
737 .and_then(|id| id.style(syntax_theme))
738 {
739 let highlight_style = override_style.map_or(highlight_style, |override_style| {
740 highlight_style.highlight(override_style)
741 });
742 self.highlights.push((start..end, highlight_style));
743 } else if let Some(override_style) = override_style {
744 self.highlights.push((start..end, override_style));
745 }
746 }
747 }
748
749 fn highlighted_chunks<'a>(
750 range: Range<usize>,
751 snapshot: &'a text::BufferSnapshot,
752 syntax_snapshot: &'a SyntaxSnapshot,
753 ) -> BufferChunks<'a> {
754 let captures = syntax_snapshot.captures(range.clone(), snapshot, |grammar| {
755 grammar
756 .highlights_config
757 .as_ref()
758 .map(|config| &config.query)
759 });
760
761 let highlight_maps = captures
762 .grammars()
763 .iter()
764 .map(|grammar| grammar.highlight_map())
765 .collect();
766
767 BufferChunks::new(
768 snapshot.as_rope(),
769 range,
770 Some((captures, highlight_maps)),
771 false,
772 None,
773 )
774 }
775}
776
777#[derive(Clone)]
778pub struct EditPreview {
779 old_snapshot: text::BufferSnapshot,
780 applied_edits_snapshot: text::BufferSnapshot,
781 syntax_snapshot: SyntaxSnapshot,
782}
783
784impl EditPreview {
785 pub fn as_unified_diff(
786 &self,
787 file: Option<&Arc<dyn File>>,
788 edits: &[(Range<Anchor>, impl AsRef<str>)],
789 ) -> Option<String> {
790 let (first, _) = edits.first()?;
791 let (last, _) = edits.last()?;
792
793 let start = first.start.to_point(&self.old_snapshot);
794 let old_end = last.end.to_point(&self.old_snapshot);
795 let new_end = last
796 .end
797 .bias_right(&self.old_snapshot)
798 .to_point(&self.applied_edits_snapshot);
799
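        // Widen the window with a few rows of context: three rows before the
        // first edit and three after the last (the `+ 4` end is exclusive of
        // its row, and both ends are clamped to the buffer bounds).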
800 let start = Point::new(start.row.saturating_sub(3), 0);
801 let old_end = Point::new(old_end.row + 4, 0).min(self.old_snapshot.max_point());
802 let new_end = Point::new(new_end.row + 4, 0).min(self.applied_edits_snapshot.max_point());
803
804 let diff_body = unified_diff_with_offsets(
805 &self
806 .old_snapshot
807 .text_for_range(start..old_end)
808 .collect::<String>(),
809 &self
810 .applied_edits_snapshot
811 .text_for_range(start..new_end)
812 .collect::<String>(),
813 start.row,
814 start.row,
815 );
816
817 let path = file.map(|f| f.path().as_unix_str());
818 let header = match path {
819 Some(p) => format!("--- a/{}\n+++ b/{}\n", p, p),
820 None => String::new(),
821 };
822
823 Some(format!("{}{}", header, diff_body))
824 }
825
826 pub fn highlight_edits(
827 &self,
828 current_snapshot: &BufferSnapshot,
829 edits: &[(Range<Anchor>, impl AsRef<str>)],
830 include_deletions: bool,
831 cx: &App,
832 ) -> HighlightedText {
833 let Some(visible_range_in_preview_snapshot) = self.compute_visible_range(edits) else {
834 return HighlightedText::default();
835 };
836
837 let mut highlighted_text = HighlightedTextBuilder::default();
838
839 let visible_range_in_preview_snapshot =
840 visible_range_in_preview_snapshot.to_offset(&self.applied_edits_snapshot);
841 let mut offset_in_preview_snapshot = visible_range_in_preview_snapshot.start;
842
843 let insertion_highlight_style = HighlightStyle {
844 background_color: Some(cx.theme().status().created_background),
845 ..Default::default()
846 };
847 let deletion_highlight_style = HighlightStyle {
848 background_color: Some(cx.theme().status().deleted_background),
849 ..Default::default()
850 };
851 let syntax_theme = cx.theme().syntax();
852
853 for (range, edit_text) in edits {
854 let edit_new_end_in_preview_snapshot = range
855 .end
856 .bias_right(&self.old_snapshot)
857 .to_offset(&self.applied_edits_snapshot);
858 let edit_start_in_preview_snapshot =
859 edit_new_end_in_preview_snapshot - edit_text.as_ref().len();
860
861 let unchanged_range_in_preview_snapshot =
862 offset_in_preview_snapshot..edit_start_in_preview_snapshot;
863 if !unchanged_range_in_preview_snapshot.is_empty() {
864 highlighted_text.add_text_from_buffer_range(
865 unchanged_range_in_preview_snapshot,
866 &self.applied_edits_snapshot,
867 &self.syntax_snapshot,
868 None,
869 syntax_theme,
870 );
871 }
872
873 let range_in_current_snapshot = range.to_offset(current_snapshot);
874 if include_deletions && !range_in_current_snapshot.is_empty() {
875 highlighted_text.add_text_from_buffer_range(
876 range_in_current_snapshot,
                    &current_snapshot.text,
                    &current_snapshot.syntax,
879 Some(deletion_highlight_style),
880 syntax_theme,
881 );
882 }
883
884 if !edit_text.as_ref().is_empty() {
885 highlighted_text.add_text_from_buffer_range(
886 edit_start_in_preview_snapshot..edit_new_end_in_preview_snapshot,
887 &self.applied_edits_snapshot,
888 &self.syntax_snapshot,
889 Some(insertion_highlight_style),
890 syntax_theme,
891 );
892 }
893
894 offset_in_preview_snapshot = edit_new_end_in_preview_snapshot;
895 }
896
897 highlighted_text.add_text_from_buffer_range(
898 offset_in_preview_snapshot..visible_range_in_preview_snapshot.end,
899 &self.applied_edits_snapshot,
900 &self.syntax_snapshot,
901 None,
902 syntax_theme,
903 );
904
905 highlighted_text.build()
906 }
907
908 pub fn build_result_buffer(&self, cx: &mut App) -> Entity<Buffer> {
909 cx.new(|cx| {
910 let mut buffer = Buffer::local_normalized(
911 self.applied_edits_snapshot.as_rope().clone(),
912 self.applied_edits_snapshot.line_ending(),
913 cx,
914 );
915 buffer.set_language_async(self.syntax_snapshot.root_language(), cx);
916 buffer
917 })
918 }
919
920 pub fn compute_visible_range<T>(&self, edits: &[(Range<Anchor>, T)]) -> Option<Range<Point>> {
921 let (first, _) = edits.first()?;
922 let (last, _) = edits.last()?;
923
924 let start = first
925 .start
926 .bias_left(&self.old_snapshot)
927 .to_point(&self.applied_edits_snapshot);
928 let end = last
929 .end
930 .bias_right(&self.old_snapshot)
931 .to_point(&self.applied_edits_snapshot);
932
933 // Ensure that the first line of the first edit and the last line of the last edit are always fully visible
934 let range = Point::new(start.row, 0)
935 ..Point::new(end.row, self.applied_edits_snapshot.line_len(end.row));
936
937 Some(range)
938 }
939}
940
941#[derive(Clone, Debug, PartialEq, Eq)]
942pub struct BracketMatch<T> {
943 pub open_range: Range<T>,
944 pub close_range: Range<T>,
945 pub newline_only: bool,
946 pub syntax_layer_depth: usize,
947 pub color_index: Option<usize>,
948}
949
950impl<T> BracketMatch<T> {
951 pub fn bracket_ranges(self) -> (Range<T>, Range<T>) {
952 (self.open_range, self.close_range)
953 }
954}
955
956impl Buffer {
957 /// Create a new buffer with the given base text.
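    ///
    /// A sketch of typical construction inside a gpui app (assumes a
    /// `cx: &mut App` is in scope; not compiled as a doctest):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| Buffer::local("fn main() {}\n", cx));
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.edit([(0..0, "// a comment\n")], None, cx);
    /// });
    /// ```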
958 pub fn local<T: Into<String>>(base_text: T, cx: &Context<Self>) -> Self {
959 Self::build(
960 TextBuffer::new(
961 ReplicaId::LOCAL,
962 cx.entity_id().as_non_zero_u64().into(),
963 base_text.into(),
964 ),
965 None,
966 Capability::ReadWrite,
967 )
968 }
969
970 /// Create a new buffer with the given base text that has proper line endings and other normalization applied.
971 pub fn local_normalized(
972 base_text_normalized: Rope,
973 line_ending: LineEnding,
974 cx: &Context<Self>,
975 ) -> Self {
976 Self::build(
977 TextBuffer::new_normalized(
978 ReplicaId::LOCAL,
979 cx.entity_id().as_non_zero_u64().into(),
980 line_ending,
981 base_text_normalized,
982 ),
983 None,
984 Capability::ReadWrite,
985 )
986 }
987
988 /// Create a new buffer that is a replica of a remote buffer.
989 pub fn remote(
990 remote_id: BufferId,
991 replica_id: ReplicaId,
992 capability: Capability,
993 base_text: impl Into<String>,
994 ) -> Self {
995 Self::build(
996 TextBuffer::new(replica_id, remote_id, base_text.into()),
997 None,
998 capability,
999 )
1000 }
1001
1002 /// Create a new buffer that is a replica of a remote buffer, populating its
1003 /// state from the given protobuf message.
1004 pub fn from_proto(
1005 replica_id: ReplicaId,
1006 capability: Capability,
1007 message: proto::BufferState,
1008 file: Option<Arc<dyn File>>,
1009 ) -> Result<Self> {
1010 let buffer_id = BufferId::new(message.id).context("Could not deserialize buffer_id")?;
1011 let buffer = TextBuffer::new(replica_id, buffer_id, message.base_text);
1012 let mut this = Self::build(buffer, file, capability);
1013 this.text.set_line_ending(proto::deserialize_line_ending(
1014 rpc::proto::LineEnding::from_i32(message.line_ending).context("missing line_ending")?,
1015 ));
1016 this.saved_version = proto::deserialize_version(&message.saved_version);
1017 this.saved_mtime = message.saved_mtime.map(|time| time.into());
1018 Ok(this)
1019 }
1020
1021 /// Serialize the buffer's state to a protobuf message.
1022 pub fn to_proto(&self, cx: &App) -> proto::BufferState {
1023 proto::BufferState {
1024 id: self.remote_id().into(),
1025 file: self.file.as_ref().map(|f| f.to_proto(cx)),
1026 base_text: self.base_text().to_string(),
1027 line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
1028 saved_version: proto::serialize_version(&self.saved_version),
1029 saved_mtime: self.saved_mtime.map(|time| time.into()),
1030 }
1031 }
1032
1033 /// Serialize as protobufs all of the changes to the buffer since the given version.
1034 pub fn serialize_ops(
1035 &self,
1036 since: Option<clock::Global>,
1037 cx: &App,
1038 ) -> Task<Vec<proto::Operation>> {
1039 let mut operations = Vec::new();
1040 operations.extend(self.deferred_ops.iter().map(proto::serialize_operation));
1041
1042 operations.extend(self.remote_selections.iter().map(|(_, set)| {
1043 proto::serialize_operation(&Operation::UpdateSelections {
1044 selections: set.selections.clone(),
1045 lamport_timestamp: set.lamport_timestamp,
1046 line_mode: set.line_mode,
1047 cursor_shape: set.cursor_shape,
1048 })
1049 }));
1050
1051 for (server_id, diagnostics) in &self.diagnostics {
1052 operations.push(proto::serialize_operation(&Operation::UpdateDiagnostics {
1053 lamport_timestamp: self.diagnostics_timestamp,
1054 server_id: *server_id,
1055 diagnostics: diagnostics.iter().cloned().collect(),
1056 }));
1057 }
1058
1059 for (server_id, completions) in &self.completion_triggers_per_language_server {
1060 operations.push(proto::serialize_operation(
1061 &Operation::UpdateCompletionTriggers {
1062 triggers: completions.iter().cloned().collect(),
1063 lamport_timestamp: self.completion_triggers_timestamp,
1064 server_id: *server_id,
1065 },
1066 ));
1067 }
1068
1069 let text_operations = self.text.operations().clone();
1070 cx.background_spawn(async move {
1071 let since = since.unwrap_or_default();
1072 operations.extend(
1073 text_operations
1074 .iter()
1075 .filter(|(_, op)| !since.observed(op.timestamp()))
1076 .map(|(_, op)| proto::serialize_operation(&Operation::Buffer(op.clone()))),
1077 );
1078 operations.sort_unstable_by_key(proto::lamport_timestamp_for_operation);
1079 operations
1080 })
1081 }
1082
1083 /// Assign a language to the buffer, returning the buffer.
1084 pub fn with_language_async(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1085 self.set_language_async(Some(language), cx);
1086 self
1087 }
1088
1089 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer, returning the buffer.
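    ///
    /// A sketch of how this is typically chained during construction (assumes
    /// `language: Arc<Language>` and a gpui `cx`; not compiled as a doctest):
    ///
    /// ```ignore
    /// let buffer = cx.new(|cx| {
    ///     Buffer::local("fn main() {}\n", cx).with_language(language.clone(), cx)
    /// });
    /// ```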
1090 #[ztracing::instrument(skip_all, fields(lang = language.config.name.0.as_str()))]
1091 pub fn with_language(mut self, language: Arc<Language>, cx: &mut Context<Self>) -> Self {
1092 self.set_language(Some(language), cx);
1093 self
1094 }
1095
1096 /// Returns the [`Capability`] of this buffer.
1097 pub fn capability(&self) -> Capability {
1098 self.capability
1099 }
1100
1101 /// Whether this buffer can only be read.
1102 pub fn read_only(&self) -> bool {
1103 !self.capability.editable()
1104 }
1105
1106 /// Builds a [`Buffer`] with the given underlying [`TextBuffer`], diff base, [`File`] and [`Capability`].
1107 pub fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>, capability: Capability) -> Self {
1108 let saved_mtime = file.as_ref().and_then(|file| file.disk_state().mtime());
1109 let snapshot = buffer.snapshot();
1110 let syntax_map = Mutex::new(SyntaxMap::new(&snapshot));
1111 let tree_sitter_data = TreeSitterData::new(snapshot);
1112 Self {
1113 saved_mtime,
1114 tree_sitter_data: Arc::new(tree_sitter_data),
1115 saved_version: buffer.version(),
1116 preview_version: buffer.version(),
1117 reload_task: None,
1118 transaction_depth: 0,
1119 was_dirty_before_starting_transaction: None,
1120 has_unsaved_edits: Cell::new((buffer.version(), false)),
1121 text: buffer,
1122 branch_state: None,
1123 file,
1124 capability,
1125 syntax_map,
1126 reparse: None,
1127 non_text_state_update_count: 0,
1128 sync_parse_timeout: if cfg!(any(test, feature = "test-support")) {
1129 Some(Duration::from_millis(10))
1130 } else {
1131 Some(Duration::from_millis(1))
1132 },
1133 parse_status: watch::channel(ParseStatus::Idle),
1134 autoindent_requests: Default::default(),
1135 wait_for_autoindent_txs: Default::default(),
1136 pending_autoindent: Default::default(),
1137 language: None,
1138 remote_selections: Default::default(),
1139 diagnostics: Default::default(),
1140 diagnostics_timestamp: Lamport::MIN,
1141 completion_triggers: Default::default(),
1142 completion_triggers_per_language_server: Default::default(),
1143 completion_triggers_timestamp: Lamport::MIN,
1144 deferred_ops: OperationQueue::new(),
1145 has_conflict: false,
1146 change_bits: Default::default(),
1147 _subscriptions: Vec::new(),
1148 encoding: encoding_rs::UTF_8,
1149 has_bom: false,
1150 }
1151 }
1152
1153 pub fn build_snapshot(
1154 text: Rope,
1155 language: Option<Arc<Language>>,
1156 language_registry: Option<Arc<LanguageRegistry>>,
1157 cx: &mut App,
1158 ) -> impl Future<Output = BufferSnapshot> + use<> {
1159 let entity_id = cx.reserve_entity::<Self>().entity_id();
1160 let buffer_id = entity_id.as_non_zero_u64().into();
1161 async move {
1162 let text =
1163 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1164 .snapshot();
1165 let mut syntax = SyntaxMap::new(&text).snapshot();
1166 if let Some(language) = language.clone() {
1167 let language_registry = language_registry.clone();
1168 syntax.reparse(&text, language_registry, language);
1169 }
1170 let tree_sitter_data = TreeSitterData::new(text.clone());
1171 BufferSnapshot {
1172 text,
1173 syntax,
1174 file: None,
1175 diagnostics: Default::default(),
1176 remote_selections: Default::default(),
1177 tree_sitter_data: Arc::new(tree_sitter_data),
1178 language,
1179 non_text_state_update_count: 0,
1180 capability: Capability::ReadOnly,
1181 }
1182 }
1183 }
1184
1185 pub fn build_empty_snapshot(cx: &mut App) -> BufferSnapshot {
1186 let entity_id = cx.reserve_entity::<Self>().entity_id();
1187 let buffer_id = entity_id.as_non_zero_u64().into();
1188 let text = TextBuffer::new_normalized(
1189 ReplicaId::LOCAL,
1190 buffer_id,
1191 Default::default(),
1192 Rope::new(),
1193 )
1194 .snapshot();
1195 let syntax = SyntaxMap::new(&text).snapshot();
1196 let tree_sitter_data = TreeSitterData::new(text.clone());
1197 BufferSnapshot {
1198 text,
1199 syntax,
1200 tree_sitter_data: Arc::new(tree_sitter_data),
1201 file: None,
1202 diagnostics: Default::default(),
1203 remote_selections: Default::default(),
1204 language: None,
1205 non_text_state_update_count: 0,
1206 capability: Capability::ReadOnly,
1207 }
1208 }
1209
1210 #[cfg(any(test, feature = "test-support"))]
1211 pub fn build_snapshot_sync(
1212 text: Rope,
1213 language: Option<Arc<Language>>,
1214 language_registry: Option<Arc<LanguageRegistry>>,
1215 cx: &mut App,
1216 ) -> BufferSnapshot {
1217 let entity_id = cx.reserve_entity::<Self>().entity_id();
1218 let buffer_id = entity_id.as_non_zero_u64().into();
1219 let text =
1220 TextBuffer::new_normalized(ReplicaId::LOCAL, buffer_id, Default::default(), text)
1221 .snapshot();
1222 let mut syntax = SyntaxMap::new(&text).snapshot();
1223 if let Some(language) = language.clone() {
1224 syntax.reparse(&text, language_registry, language);
1225 }
1226 let tree_sitter_data = TreeSitterData::new(text.clone());
1227 BufferSnapshot {
1228 text,
1229 syntax,
1230 tree_sitter_data: Arc::new(tree_sitter_data),
1231 file: None,
1232 diagnostics: Default::default(),
1233 remote_selections: Default::default(),
1234 language,
1235 non_text_state_update_count: 0,
1236 capability: Capability::ReadOnly,
1237 }
1238 }
1239
1240 /// Retrieve a snapshot of the buffer's current state. This is computationally
1241 /// cheap, and allows reading from the buffer on a background thread.
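    ///
    /// A sketch of the intended pattern (assumes `buffer: Entity<Buffer>` and a
    /// gpui `cx`; not compiled as a doctest):
    ///
    /// ```ignore
    /// let snapshot = buffer.read(cx).snapshot();
    /// let text_in_background = cx.background_spawn(async move {
    ///     // The snapshot can be read off the main thread.
    ///     snapshot.text()
    /// });
    /// ```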
1242 pub fn snapshot(&self) -> BufferSnapshot {
1243 let text = self.text.snapshot();
1244 let mut syntax_map = self.syntax_map.lock();
1245 syntax_map.interpolate(&text);
1246 let syntax = syntax_map.snapshot();
1247
1248 let tree_sitter_data = if self.text.version() != *self.tree_sitter_data.version() {
1249 Arc::new(TreeSitterData::new(text.clone()))
1250 } else {
1251 self.tree_sitter_data.clone()
1252 };
1253
1254 BufferSnapshot {
1255 text,
1256 syntax,
1257 tree_sitter_data,
1258 file: self.file.clone(),
1259 remote_selections: self.remote_selections.clone(),
1260 diagnostics: self.diagnostics.clone(),
1261 language: self.language.clone(),
1262 non_text_state_update_count: self.non_text_state_update_count,
1263 capability: self.capability,
1264 }
1265 }
1266
1267 pub fn branch(&mut self, cx: &mut Context<Self>) -> Entity<Self> {
1268 let this = cx.entity();
1269 cx.new(|cx| {
1270 let mut branch = Self {
1271 branch_state: Some(BufferBranchState {
1272 base_buffer: this.clone(),
1273 merged_operations: Default::default(),
1274 }),
1275 language: self.language.clone(),
1276 has_conflict: self.has_conflict,
1277 has_unsaved_edits: Cell::new(self.has_unsaved_edits.get_mut().clone()),
1278 _subscriptions: vec![cx.subscribe(&this, Self::on_base_buffer_event)],
1279 ..Self::build(self.text.branch(), self.file.clone(), self.capability())
1280 };
1281 if let Some(language_registry) = self.language_registry() {
1282 branch.set_language_registry(language_registry);
1283 }
1284
1285 // Reparse the branch buffer so that we get syntax highlighting immediately.
1286 branch.reparse(cx, true);
1287
1288 branch
1289 })
1290 }
1291
1292 pub fn preview_edits(
1293 &self,
1294 edits: Arc<[(Range<Anchor>, Arc<str>)]>,
1295 cx: &App,
1296 ) -> Task<EditPreview> {
1297 let registry = self.language_registry();
1298 let language = self.language().cloned();
1299 let old_snapshot = self.text.snapshot();
1300 let mut branch_buffer = self.text.branch();
1301 let mut syntax_snapshot = self.syntax_map.lock().snapshot();
1302 cx.background_spawn(async move {
1303 if !edits.is_empty() {
1304 if let Some(language) = language.clone() {
1305 syntax_snapshot.reparse(&old_snapshot, registry.clone(), language);
1306 }
1307
1308 branch_buffer.edit(edits.iter().cloned());
1309 let snapshot = branch_buffer.snapshot();
1310 syntax_snapshot.interpolate(&snapshot);
1311
1312 if let Some(language) = language {
1313 syntax_snapshot.reparse(&snapshot, registry, language);
1314 }
1315 }
1316 EditPreview {
1317 old_snapshot,
1318 applied_edits_snapshot: branch_buffer.snapshot(),
1319 syntax_snapshot,
1320 }
1321 })
1322 }
1323
1324 /// Applies all of the changes in this buffer that intersect any of the
1325 /// given `ranges` to its base buffer.
1326 ///
1327 /// If `ranges` is empty, then all changes will be applied. This buffer must
1328 /// be a branch buffer to call this method.
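    ///
    /// A sketch of typical usage on a branch buffer (assumes `branch` was
    /// created via [`Buffer::branch`] and a gpui `cx`; not compiled as a doctest):
    ///
    /// ```ignore
    /// branch.update(cx, |branch, cx| {
    ///     // Apply only the changes that intersect bytes 0..10 to the base buffer.
    ///     branch.merge_into_base(vec![0..10], cx);
    ///     // Or apply every change:
    ///     branch.merge_into_base(Vec::new(), cx);
    /// });
    /// ```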
1329 pub fn merge_into_base(&mut self, ranges: Vec<Range<usize>>, cx: &mut Context<Self>) {
1330 let Some(base_buffer) = self.base_buffer() else {
1331 debug_panic!("not a branch buffer");
1332 return;
1333 };
1334
1335 let mut ranges = if ranges.is_empty() {
1336 &[0..usize::MAX]
1337 } else {
1338 ranges.as_slice()
1339 }
1340 .iter()
1341 .peekable();
1342
1343 let mut edits = Vec::new();
1344 for edit in self.edits_since::<usize>(&base_buffer.read(cx).version()) {
1345 let mut is_included = false;
1346 while let Some(range) = ranges.peek() {
1347 if range.end < edit.new.start {
1348 ranges.next().unwrap();
1349 } else {
1350 if range.start <= edit.new.end {
1351 is_included = true;
1352 }
1353 break;
1354 }
1355 }
1356
1357 if is_included {
1358 edits.push((
1359 edit.old.clone(),
1360 self.text_for_range(edit.new.clone()).collect::<String>(),
1361 ));
1362 }
1363 }
1364
1365 let operation = base_buffer.update(cx, |base_buffer, cx| {
1366 // cx.emit(BufferEvent::DiffBaseChanged);
1367 base_buffer.edit(edits, None, cx)
1368 });
1369
1370 if let Some(operation) = operation
1371 && let Some(BufferBranchState {
1372 merged_operations, ..
1373 }) = &mut self.branch_state
1374 {
1375 merged_operations.push(operation);
1376 }
1377 }
1378
1379 fn on_base_buffer_event(
1380 &mut self,
1381 _: Entity<Buffer>,
1382 event: &BufferEvent,
1383 cx: &mut Context<Self>,
1384 ) {
1385 let BufferEvent::Operation { operation, .. } = event else {
1386 return;
1387 };
1388 let Some(BufferBranchState {
1389 merged_operations, ..
1390 }) = &mut self.branch_state
1391 else {
1392 return;
1393 };
1394
1395 let mut operation_to_undo = None;
1396 if let Operation::Buffer(text::Operation::Edit(operation)) = &operation
1397 && let Ok(ix) = merged_operations.binary_search(&operation.timestamp)
1398 {
1399 merged_operations.remove(ix);
1400 operation_to_undo = Some(operation.timestamp);
1401 }
1402
1403 self.apply_ops([operation.clone()], cx);
1404
1405 if let Some(timestamp) = operation_to_undo {
1406 let counts = [(timestamp, u32::MAX)].into_iter().collect();
1407 self.undo_operations(counts, cx);
1408 }
1409 }
1410
1411 #[cfg(test)]
1412 pub(crate) fn as_text_snapshot(&self) -> &text::BufferSnapshot {
1413 &self.text
1414 }
1415
1416 /// Retrieve a snapshot of the buffer's raw text, without any
1417 /// language-related state like the syntax tree or diagnostics.
1418 #[ztracing::instrument(skip_all)]
1419 pub fn text_snapshot(&self) -> text::BufferSnapshot {
1420 self.text.snapshot()
1421 }
1422
1423 /// The file associated with the buffer, if any.
1424 pub fn file(&self) -> Option<&Arc<dyn File>> {
1425 self.file.as_ref()
1426 }
1427
1428 /// The version of the buffer that was last saved or reloaded from disk.
1429 pub fn saved_version(&self) -> &clock::Global {
1430 &self.saved_version
1431 }
1432
1433 /// The mtime of the buffer's file when the buffer was last saved or reloaded from disk.
1434 pub fn saved_mtime(&self) -> Option<MTime> {
1435 self.saved_mtime
1436 }
1437
1438 /// Returns the character encoding of the buffer's file.
1439 pub fn encoding(&self) -> &'static Encoding {
1440 self.encoding
1441 }
1442
1443 /// Sets the character encoding of the buffer.
1444 pub fn set_encoding(&mut self, encoding: &'static Encoding) {
1445 self.encoding = encoding;
1446 }
1447
1448 /// Returns whether the buffer has a Byte Order Mark.
1449 pub fn has_bom(&self) -> bool {
1450 self.has_bom
1451 }
1452
1453 /// Sets whether the buffer has a Byte Order Mark.
1454 pub fn set_has_bom(&mut self, has_bom: bool) {
1455 self.has_bom = has_bom;
1456 }
1457
1458 /// Assign a language to the buffer.
1459 pub fn set_language_async(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1460 self.set_language_(language, cfg!(any(test, feature = "test-support")), cx);
1461 }
1462
1463 /// Assign a language to the buffer, blocking for up to 1ms to reparse the buffer.
1464 pub fn set_language(&mut self, language: Option<Arc<Language>>, cx: &mut Context<Self>) {
1465 self.set_language_(language, true, cx);
1466 }
1467
1468 #[ztracing::instrument(skip_all)]
1469 fn set_language_(
1470 &mut self,
1471 language: Option<Arc<Language>>,
1472 may_block: bool,
1473 cx: &mut Context<Self>,
1474 ) {
1475 self.non_text_state_update_count += 1;
1476 self.syntax_map.lock().clear(&self.text);
1477 let old_language = std::mem::replace(&mut self.language, language);
1478 self.was_changed();
1479 self.reparse(cx, may_block);
1480 let has_fresh_language =
1481 self.language.is_some() && old_language.is_none_or(|old| old == *PLAIN_TEXT);
1482 cx.emit(BufferEvent::LanguageChanged(has_fresh_language));
1483 }
1484
1485 /// Assign a language registry to the buffer. This allows the buffer to retrieve
1486 /// other languages if parts of the buffer are written in different languages.
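    ///
    /// A sketch of typical setup (assumes `registry: Arc<LanguageRegistry>`,
    /// `language: Arc<Language>`, and a gpui `cx`; not compiled as a doctest):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| {
    ///     buffer.set_language_registry(registry.clone());
    ///     buffer.set_language(Some(language.clone()), cx);
    /// });
    /// ```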
1487 pub fn set_language_registry(&self, language_registry: Arc<LanguageRegistry>) {
1488 self.syntax_map
1489 .lock()
1490 .set_language_registry(language_registry);
1491 }
1492
1493 pub fn language_registry(&self) -> Option<Arc<LanguageRegistry>> {
1494 self.syntax_map.lock().language_registry()
1495 }
1496
1497 /// Assign the line ending type to the buffer.
1498 pub fn set_line_ending(&mut self, line_ending: LineEnding, cx: &mut Context<Self>) {
1499 self.text.set_line_ending(line_ending);
1500
1501 let lamport_timestamp = self.text.lamport_clock.tick();
1502 self.send_operation(
1503 Operation::UpdateLineEnding {
1504 line_ending,
1505 lamport_timestamp,
1506 },
1507 true,
1508 cx,
1509 );
1510 }
1511
1512 /// Assign the buffer a new [`Capability`].
1513 pub fn set_capability(&mut self, capability: Capability, cx: &mut Context<Self>) {
1514 if self.capability != capability {
1515 self.capability = capability;
1516 cx.emit(BufferEvent::CapabilityChanged)
1517 }
1518 }
1519
1520 /// This method is called to signal that the buffer has been saved.
1521 pub fn did_save(
1522 &mut self,
1523 version: clock::Global,
1524 mtime: Option<MTime>,
1525 cx: &mut Context<Self>,
1526 ) {
1527 self.saved_version = version.clone();
1528 self.has_unsaved_edits.set((version, false));
1529 self.has_conflict = false;
1530 self.saved_mtime = mtime;
1531 self.was_changed();
1532 cx.emit(BufferEvent::Saved);
1533 cx.notify();
1534 }
1535
1536 /// Reloads the contents of the buffer from disk.
1537 pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
1538 let (tx, rx) = futures::channel::oneshot::channel();
1539 let prev_version = self.text.version();
1540 self.reload_task = Some(cx.spawn(async move |this, cx| {
1541 let Some((new_mtime, load_bytes_task, encoding)) = this.update(cx, |this, cx| {
1542 let file = this.file.as_ref()?.as_local()?;
1543 Some((
1544 file.disk_state().mtime(),
1545 file.load_bytes(cx),
1546 this.encoding,
1547 ))
1548 })?
1549 else {
1550 return Ok(());
1551 };
1552
1553 let bytes = load_bytes_task.await?;
1554 let (cow, _encoding_used, _has_errors) = encoding.decode(&bytes);
1555 let new_text = cow.into_owned();
1556
1557 let diff = this.update(cx, |this, cx| this.diff(new_text, cx))?.await;
1558 this.update(cx, |this, cx| {
1559 if this.version() == diff.base_version {
1560 this.finalize_last_transaction();
1561 this.apply_diff(diff, cx);
1562 tx.send(this.finalize_last_transaction().cloned()).ok();
1563 this.has_conflict = false;
1564 this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
1565 } else {
1566 if !diff.edits.is_empty()
1567 || this
1568 .edits_since::<usize>(&diff.base_version)
1569 .next()
1570 .is_some()
1571 {
1572 this.has_conflict = true;
1573 }
1574
1575 this.did_reload(prev_version, this.line_ending(), this.saved_mtime, cx);
1576 }
1577
1578 this.reload_task.take();
1579 })
1580 }));
1581 rx
1582 }
1583
1584 /// This method is called to signal that the buffer has been reloaded.
1585 pub fn did_reload(
1586 &mut self,
1587 version: clock::Global,
1588 line_ending: LineEnding,
1589 mtime: Option<MTime>,
1590 cx: &mut Context<Self>,
1591 ) {
1592 self.saved_version = version;
1593 self.has_unsaved_edits
1594 .set((self.saved_version.clone(), false));
1595 self.text.set_line_ending(line_ending);
1596 self.saved_mtime = mtime;
1597 cx.emit(BufferEvent::Reloaded);
1598 cx.notify();
1599 }
1600
1601 /// Updates the [`File`] backing this buffer. This should be called when
1602 /// the file has changed or has been deleted.
1603 pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
1604 let was_dirty = self.is_dirty();
1605 let mut file_changed = false;
1606
1607 if let Some(old_file) = self.file.as_ref() {
1608 if new_file.path() != old_file.path() {
1609 file_changed = true;
1610 }
1611
1612 let old_state = old_file.disk_state();
1613 let new_state = new_file.disk_state();
1614 if old_state != new_state {
1615 file_changed = true;
1616 if !was_dirty && matches!(new_state, DiskState::Present { .. }) {
1617 cx.emit(BufferEvent::ReloadNeeded)
1618 }
1619 }
1620 } else {
1621 file_changed = true;
1622 };
1623
1624 self.file = Some(new_file);
1625 if file_changed {
1626 self.was_changed();
1627 self.non_text_state_update_count += 1;
1628 if was_dirty != self.is_dirty() {
1629 cx.emit(BufferEvent::DirtyChanged);
1630 }
1631 cx.emit(BufferEvent::FileHandleChanged);
1632 cx.notify();
1633 }
1634 }
1635
1636 pub fn base_buffer(&self) -> Option<Entity<Self>> {
1637 Some(self.branch_state.as_ref()?.base_buffer.clone())
1638 }
1639
1640 /// Returns the primary [`Language`] assigned to this [`Buffer`].
1641 pub fn language(&self) -> Option<&Arc<Language>> {
1642 self.language.as_ref()
1643 }
1644
1645 /// Returns the [`Language`] at the given location.
1646 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
1647 let offset = position.to_offset(self);
1648 let mut is_first = true;
1649 let start_anchor = self.anchor_before(offset);
1650 let end_anchor = self.anchor_after(offset);
1651 self.syntax_map
1652 .lock()
1653 .layers_for_range(offset..offset, &self.text, false)
1654 .filter(|layer| {
1655 if is_first {
1656 is_first = false;
1657 return true;
1658 }
1659
1660 layer
1661 .included_sub_ranges
1662 .map(|sub_ranges| {
1663 sub_ranges.iter().any(|sub_range| {
1664 let is_before_start = sub_range.end.cmp(&start_anchor, self).is_lt();
1665 let is_after_end = sub_range.start.cmp(&end_anchor, self).is_gt();
1666 !is_before_start && !is_after_end
1667 })
1668 })
1669 .unwrap_or(true)
1670 })
1671 .last()
1672 .map(|info| info.language.clone())
1673 .or_else(|| self.language.clone())
1674 }
1675
1676 /// Returns each [`Language`] for the active syntax layers at the given location.
1677 pub fn languages_at<D: ToOffset>(&self, position: D) -> Vec<Arc<Language>> {
1678 let offset = position.to_offset(self);
1679 let mut languages: Vec<Arc<Language>> = self
1680 .syntax_map
1681 .lock()
1682 .layers_for_range(offset..offset, &self.text, false)
1683 .map(|info| info.language.clone())
1684 .collect();
1685
1686 if languages.is_empty()
1687 && let Some(buffer_language) = self.language()
1688 {
1689 languages.push(buffer_language.clone());
1690 }
1691
1692 languages
1693 }
1694
1695 /// An integer version number that accounts for all updates besides
1696 /// the buffer's text itself (which is versioned via a version vector).
1697 pub fn non_text_state_update_count(&self) -> usize {
1698 self.non_text_state_update_count
1699 }
1700
1701 /// Whether the buffer is being parsed in the background.
1702 #[cfg(any(test, feature = "test-support"))]
1703 pub fn is_parsing(&self) -> bool {
1704 self.reparse.is_some()
1705 }
1706
1707 /// Indicates whether the buffer contains any regions that may be
1708 /// written in a language that hasn't been loaded yet.
1709 pub fn contains_unknown_injections(&self) -> bool {
1710 self.syntax_map.lock().contains_unknown_injections()
1711 }
1712
1713 #[cfg(any(test, feature = "test-support"))]
1714 pub fn set_sync_parse_timeout(&mut self, timeout: Option<Duration>) {
1715 self.sync_parse_timeout = timeout;
1716 }
1717
1718 fn invalidate_tree_sitter_data(&mut self, snapshot: text::BufferSnapshot) {
1719 match Arc::get_mut(&mut self.tree_sitter_data) {
1720 Some(tree_sitter_data) => tree_sitter_data.clear(snapshot),
1721 None => {
1722 let tree_sitter_data = TreeSitterData::new(snapshot);
1723 self.tree_sitter_data = Arc::new(tree_sitter_data)
1724 }
1725 }
1726 }
1727
1728 /// Called after an edit to synchronize the buffer's main parse tree with
1729 /// the buffer's new underlying state.
1730 ///
1731 /// Locks the syntax map and interpolates the edits since the last reparse
1732 /// into the foreground syntax tree.
1733 ///
1734 /// Then takes a stable snapshot of the syntax map before unlocking it.
1735 /// The snapshot with the interpolated edits is sent to a background thread,
1736 /// where we ask Tree-sitter to perform an incremental parse.
1737 ///
    /// Meanwhile, if `may_block` is true, we block the main thread for up to
    /// the buffer's sync parse timeout (1ms) waiting for the parse to complete,
    /// and proceed synchronously if it finishes within that window.
    ///
    /// If we time out waiting on the parse, we return with the interpolated tree
    /// still in the foreground and spawn a second task that waits for the parse
    /// to complete. When the background parse finishes, it calls back into the
    /// main thread and assigns the new parse state.
    ///
    /// If the buffer or grammar changed since the start of the background parse,
    /// we initiate an additional reparse. To avoid concurrent parses for the
    /// same buffer, a new parse is only initiated if one is not already running
    /// in the background.
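    ///
    /// A minimal sketch of a typical call site (illustrative only, not compiled
    /// as a doctest):
    ///
    /// ```ignore
    /// buffer.update(cx, |buffer, cx| buffer.reparse(cx, true));
    /// // The parse may still be finishing in the background; callers that need
    /// // the final tree can await `Buffer::parsing_idle`.
    /// ```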
1751 #[ztracing::instrument(skip_all)]
1752 pub fn reparse(&mut self, cx: &mut Context<Self>, may_block: bool) {
1753 if self.text.version() != *self.tree_sitter_data.version() {
1754 self.invalidate_tree_sitter_data(self.text.snapshot());
1755 }
1756 if self.reparse.is_some() {
1757 return;
1758 }
1759 let language = if let Some(language) = self.language.clone() {
1760 language
1761 } else {
1762 return;
1763 };
1764
1765 let text = self.text_snapshot();
1766 let parsed_version = self.version();
1767
1768 let mut syntax_map = self.syntax_map.lock();
1769 syntax_map.interpolate(&text);
1770 let language_registry = syntax_map.language_registry();
1771 let mut syntax_snapshot = syntax_map.snapshot();
1772 drop(syntax_map);
1773
1774 self.parse_status.0.send(ParseStatus::Parsing).unwrap();
1775 if may_block && let Some(sync_parse_timeout) = self.sync_parse_timeout {
1776 if let Ok(()) = syntax_snapshot.reparse_with_timeout(
1777 &text,
1778 language_registry.clone(),
1779 language.clone(),
1780 sync_parse_timeout,
1781 ) {
1782 self.did_finish_parsing(syntax_snapshot, Duration::from_millis(300), cx);
1783 self.reparse = None;
1784 return;
1785 }
1786 }
1787
1788 let parse_task = cx.background_spawn({
1789 let language = language.clone();
1790 let language_registry = language_registry.clone();
1791 async move {
1792 syntax_snapshot.reparse(&text, language_registry, language);
1793 syntax_snapshot
1794 }
1795 });
1796
1797 self.reparse = Some(cx.spawn(async move |this, cx| {
1798 let new_syntax_map = parse_task.await;
1799 this.update(cx, move |this, cx| {
1800 let grammar_changed = || {
1801 this.language
1802 .as_ref()
1803 .is_none_or(|current_language| !Arc::ptr_eq(&language, current_language))
1804 };
1805 let language_registry_changed = || {
1806 new_syntax_map.contains_unknown_injections()
1807 && language_registry.is_some_and(|registry| {
1808 registry.version() != new_syntax_map.language_registry_version()
1809 })
1810 };
1811 let parse_again = this.version.changed_since(&parsed_version)
1812 || language_registry_changed()
1813 || grammar_changed();
1814 this.did_finish_parsing(new_syntax_map, Duration::ZERO, cx);
1815 this.reparse = None;
1816 if parse_again {
1817 this.reparse(cx, false);
1818 }
1819 })
1820 .ok();
1821 }));
1822 }
1823
1824 fn did_finish_parsing(
1825 &mut self,
1826 syntax_snapshot: SyntaxSnapshot,
1827 block_budget: Duration,
1828 cx: &mut Context<Self>,
1829 ) {
1830 self.non_text_state_update_count += 1;
1831 self.syntax_map.lock().did_parse(syntax_snapshot);
1832 self.was_changed();
1833 self.request_autoindent(cx, block_budget);
1834 self.parse_status.0.send(ParseStatus::Idle).unwrap();
1835 self.invalidate_tree_sitter_data(self.text.snapshot());
1836 cx.emit(BufferEvent::Reparsed);
1837 cx.notify();
1838 }
1839
1840 pub fn parse_status(&self) -> watch::Receiver<ParseStatus> {
1841 self.parse_status.1.clone()
1842 }
1843
    /// Waits until the buffer is no longer parsing.
1845 pub fn parsing_idle(&self) -> impl Future<Output = ()> + use<> {
1846 let mut parse_status = self.parse_status();
1847 async move {
1848 while *parse_status.borrow() != ParseStatus::Idle {
1849 if parse_status.changed().await.is_err() {
1850 break;
1851 }
1852 }
1853 }
1854 }
1855
1856 /// Assign to the buffer a set of diagnostics created by a given language server.
1857 pub fn update_diagnostics(
1858 &mut self,
1859 server_id: LanguageServerId,
1860 diagnostics: DiagnosticSet,
1861 cx: &mut Context<Self>,
1862 ) {
1863 let lamport_timestamp = self.text.lamport_clock.tick();
1864 let op = Operation::UpdateDiagnostics {
1865 server_id,
1866 diagnostics: diagnostics.iter().cloned().collect(),
1867 lamport_timestamp,
1868 };
1869
1870 self.apply_diagnostic_update(server_id, diagnostics, lamport_timestamp, cx);
1871 self.send_operation(op, true, cx);
1872 }
1873
1874 pub fn buffer_diagnostics(
1875 &self,
1876 for_server: Option<LanguageServerId>,
1877 ) -> Vec<&DiagnosticEntry<Anchor>> {
1878 match for_server {
1879 Some(server_id) => match self.diagnostics.binary_search_by_key(&server_id, |v| v.0) {
1880 Ok(idx) => self.diagnostics[idx].1.iter().collect(),
1881 Err(_) => Vec::new(),
1882 },
1883 None => self
1884 .diagnostics
1885 .iter()
1886 .flat_map(|(_, diagnostic_set)| diagnostic_set.iter())
1887 .collect(),
1888 }
1889 }
1890
1891 fn request_autoindent(&mut self, cx: &mut Context<Self>, block_budget: Duration) {
1892 if let Some(indent_sizes) = self.compute_autoindents() {
1893 let indent_sizes = cx.background_spawn(indent_sizes);
1894 match cx
1895 .foreground_executor()
1896 .block_with_timeout(block_budget, indent_sizes)
1897 {
1898 Ok(indent_sizes) => self.apply_autoindents(indent_sizes, cx),
1899 Err(indent_sizes) => {
1900 self.pending_autoindent = Some(cx.spawn(async move |this, cx| {
1901 let indent_sizes = indent_sizes.await;
1902 this.update(cx, |this, cx| {
1903 this.apply_autoindents(indent_sizes, cx);
1904 })
1905 .ok();
1906 }));
1907 }
1908 }
1909 } else {
1910 self.autoindent_requests.clear();
1911 for tx in self.wait_for_autoindent_txs.drain(..) {
1912 tx.send(()).ok();
1913 }
1914 }
1915 }
1916
1917 fn compute_autoindents(
1918 &self,
1919 ) -> Option<impl Future<Output = BTreeMap<u32, IndentSize>> + use<>> {
1920 let max_rows_between_yields = 100;
1921 let snapshot = self.snapshot();
1922 if snapshot.syntax.is_empty() || self.autoindent_requests.is_empty() {
1923 return None;
1924 }
1925
1926 let autoindent_requests = self.autoindent_requests.clone();
1927 Some(async move {
1928 let mut indent_sizes = BTreeMap::<u32, (IndentSize, bool)>::new();
1929 for request in autoindent_requests {
1930 // Resolve each edited range to its row in the current buffer and in the
1931 // buffer before this batch of edits.
1932 let mut row_ranges = Vec::new();
1933 let mut old_to_new_rows = BTreeMap::new();
1934 let mut language_indent_sizes_by_new_row = Vec::new();
1935 for entry in &request.entries {
1936 let position = entry.range.start;
1937 let new_row = position.to_point(&snapshot).row;
1938 let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
1939 language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
1940
1941 if let Some(old_row) = entry.old_row {
1942 old_to_new_rows.insert(old_row, new_row);
1943 }
1944 row_ranges.push((new_row..new_end_row, entry.original_indent_column));
1945 }
1946
1947 // Build a map containing the suggested indentation for each of the edited lines
1948 // with respect to the state of the buffer before these edits. This map is keyed
1949 // by the rows for these lines in the current state of the buffer.
1950 let mut old_suggestions = BTreeMap::<u32, (IndentSize, bool)>::default();
1951 let old_edited_ranges =
1952 contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
1953 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1954 let mut language_indent_size = IndentSize::default();
1955 for old_edited_range in old_edited_ranges {
1956 let suggestions = request
1957 .before_edit
1958 .suggest_autoindents(old_edited_range.clone())
1959 .into_iter()
1960 .flatten();
1961 for (old_row, suggestion) in old_edited_range.zip(suggestions) {
1962 if let Some(suggestion) = suggestion {
1963 let new_row = *old_to_new_rows.get(&old_row).unwrap();
1964
1965 // Find the indent size based on the language for this row.
1966 while let Some((row, size)) = language_indent_sizes.peek() {
1967 if *row > new_row {
1968 break;
1969 }
1970 language_indent_size = *size;
1971 language_indent_sizes.next();
1972 }
1973
1974 let suggested_indent = old_to_new_rows
1975 .get(&suggestion.basis_row)
1976 .and_then(|from_row| {
1977 Some(old_suggestions.get(from_row).copied()?.0)
1978 })
1979 .unwrap_or_else(|| {
1980 request
1981 .before_edit
1982 .indent_size_for_line(suggestion.basis_row)
1983 })
1984 .with_delta(suggestion.delta, language_indent_size);
1985 old_suggestions
1986 .insert(new_row, (suggested_indent, suggestion.within_error));
1987 }
1988 }
1989 yield_now().await;
1990 }
1991
1992 // Compute new suggestions for each line, but only include them in the result
1993 // if they differ from the old suggestion for that line.
1994 let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
1995 let mut language_indent_size = IndentSize::default();
1996 for (row_range, original_indent_column) in row_ranges {
1997 let new_edited_row_range = if request.is_block_mode {
1998 row_range.start..row_range.start + 1
1999 } else {
2000 row_range.clone()
2001 };
2002
2003 let suggestions = snapshot
2004 .suggest_autoindents(new_edited_row_range.clone())
2005 .into_iter()
2006 .flatten();
2007 for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
2008 if let Some(suggestion) = suggestion {
2009 // Find the indent size based on the language for this row.
2010 while let Some((row, size)) = language_indent_sizes.peek() {
2011 if *row > new_row {
2012 break;
2013 }
2014 language_indent_size = *size;
2015 language_indent_sizes.next();
2016 }
2017
2018 let suggested_indent = indent_sizes
2019 .get(&suggestion.basis_row)
2020 .copied()
2021 .map(|e| e.0)
2022 .unwrap_or_else(|| {
2023 snapshot.indent_size_for_line(suggestion.basis_row)
2024 })
2025 .with_delta(suggestion.delta, language_indent_size);
2026
2027 if old_suggestions.get(&new_row).is_none_or(
2028 |(old_indentation, was_within_error)| {
2029 suggested_indent != *old_indentation
2030 && (!suggestion.within_error || *was_within_error)
2031 },
2032 ) {
2033 indent_sizes.insert(
2034 new_row,
2035 (suggested_indent, request.ignore_empty_lines),
2036 );
2037 }
2038 }
2039 }
2040
2041 if let (true, Some(original_indent_column)) =
2042 (request.is_block_mode, original_indent_column)
2043 {
2044 let new_indent =
2045 if let Some((indent, _)) = indent_sizes.get(&row_range.start) {
2046 *indent
2047 } else {
2048 snapshot.indent_size_for_line(row_range.start)
2049 };
2050 let delta = new_indent.len as i64 - original_indent_column as i64;
2051 if delta != 0 {
2052 for row in row_range.skip(1) {
2053 indent_sizes.entry(row).or_insert_with(|| {
2054 let mut size = snapshot.indent_size_for_line(row);
2055 if size.kind == new_indent.kind {
2056 match delta.cmp(&0) {
2057 Ordering::Greater => size.len += delta as u32,
2058 Ordering::Less => {
2059 size.len = size.len.saturating_sub(-delta as u32)
2060 }
2061 Ordering::Equal => {}
2062 }
2063 }
2064 (size, request.ignore_empty_lines)
2065 });
2066 }
2067 }
2068 }
2069
2070 yield_now().await;
2071 }
2072 }
2073
2074 indent_sizes
2075 .into_iter()
2076 .filter_map(|(row, (indent, ignore_empty_lines))| {
2077 if ignore_empty_lines && snapshot.line_len(row) == 0 {
2078 None
2079 } else {
2080 Some((row, indent))
2081 }
2082 })
2083 .collect()
2084 })
2085 }
2086
2087 fn apply_autoindents(
2088 &mut self,
2089 indent_sizes: BTreeMap<u32, IndentSize>,
2090 cx: &mut Context<Self>,
2091 ) {
2092 self.autoindent_requests.clear();
2093 for tx in self.wait_for_autoindent_txs.drain(..) {
2094 tx.send(()).ok();
2095 }
2096
2097 let edits: Vec<_> = indent_sizes
2098 .into_iter()
2099 .filter_map(|(row, indent_size)| {
2100 let current_size = indent_size_for_line(self, row);
2101 Self::edit_for_indent_size_adjustment(row, current_size, indent_size)
2102 })
2103 .collect();
2104
2105 let preserve_preview = self.preserve_preview();
2106 self.edit(edits, None, cx);
2107 if preserve_preview {
2108 self.refresh_preview();
2109 }
2110 }
2111
2112 /// Create a minimal edit that will cause the given row to be indented
2113 /// with the given size. After applying this edit, the length of the line
2114 /// will always be at least `new_size.len`.
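    ///
    /// A minimal sketch (kept out of doctests with `ignore`; the row number is
    /// arbitrary) of widening a two-space indent on row 3 to four spaces:
    ///
    /// ```ignore
    /// let edit = Buffer::edit_for_indent_size_adjustment(
    ///     3,
    ///     IndentSize::spaces(2),
    ///     IndentSize::spaces(4),
    /// );
    /// // The minimal edit inserts the two missing spaces at the start of the line.
    /// assert_eq!(
    ///     edit,
    ///     Some((Point::new(3, 0)..Point::new(3, 0), "  ".to_string()))
    /// );
    /// ```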
2115 pub fn edit_for_indent_size_adjustment(
2116 row: u32,
2117 current_size: IndentSize,
2118 new_size: IndentSize,
2119 ) -> Option<(Range<Point>, String)> {
2120 if new_size.kind == current_size.kind {
            match new_size.len.cmp(&current_size.len) {
2122 Ordering::Greater => {
2123 let point = Point::new(row, 0);
2124 Some((
2125 point..point,
2126 iter::repeat(new_size.char())
2127 .take((new_size.len - current_size.len) as usize)
2128 .collect::<String>(),
2129 ))
2130 }
2131
2132 Ordering::Less => Some((
2133 Point::new(row, 0)..Point::new(row, current_size.len - new_size.len),
2134 String::new(),
2135 )),
2136
2137 Ordering::Equal => None,
2138 }
2139 } else {
2140 Some((
2141 Point::new(row, 0)..Point::new(row, current_size.len),
2142 iter::repeat(new_size.char())
2143 .take(new_size.len as usize)
2144 .collect::<String>(),
2145 ))
2146 }
2147 }
2148
2149 /// Spawns a background task that asynchronously computes a `Diff` between the buffer's text
2150 /// and the given new text.
2151 pub fn diff(&self, mut new_text: String, cx: &App) -> Task<Diff> {
2152 let old_text = self.as_rope().clone();
2153 let base_version = self.version();
2154 cx.background_spawn(async move {
2155 let old_text = old_text.to_string();
2156 let line_ending = LineEnding::detect(&new_text);
2157 LineEnding::normalize(&mut new_text);
2158 let edits = text_diff(&old_text, &new_text);
2159 Diff {
2160 base_version,
2161 line_ending,
2162 edits,
2163 }
2164 })
2165 }
2166
2167 /// Spawns a background task that searches the buffer for any whitespace
    /// at the ends of lines and returns a `Diff` that removes that whitespace.
2169 pub fn remove_trailing_whitespace(&self, cx: &App) -> Task<Diff> {
2170 let old_text = self.as_rope().clone();
2171 let line_ending = self.line_ending();
2172 let base_version = self.version();
2173 cx.background_spawn(async move {
2174 let ranges = trailing_whitespace_ranges(&old_text);
2175 let empty = Arc::<str>::from("");
2176 Diff {
2177 base_version,
2178 line_ending,
2179 edits: ranges
2180 .into_iter()
2181 .map(|range| (range, empty.clone()))
2182 .collect(),
2183 }
2184 })
2185 }
2186
2187 /// Ensures that the buffer ends with a single newline character, and
    /// no other trailing whitespace. Does nothing if the buffer is empty.
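    ///
    /// A minimal sketch of the effect (illustrative only):
    ///
    /// ```ignore
    /// buffer.set_text("fn main() {}  \n\n", cx);
    /// buffer.ensure_final_newline(cx);
    /// assert_eq!(buffer.text(), "fn main() {}\n");
    /// ```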
2189 pub fn ensure_final_newline(&mut self, cx: &mut Context<Self>) {
2190 let len = self.len();
2191 if len == 0 {
2192 return;
2193 }
2194 let mut offset = len;
2195 for chunk in self.as_rope().reversed_chunks_in_range(0..len) {
2196 let non_whitespace_len = chunk
2197 .trim_end_matches(|c: char| c.is_ascii_whitespace())
2198 .len();
2199 offset -= chunk.len();
2200 offset += non_whitespace_len;
2201 if non_whitespace_len != 0 {
2202 if offset == len - 1 && chunk.get(non_whitespace_len..) == Some("\n") {
2203 return;
2204 }
2205 break;
2206 }
2207 }
2208 self.edit([(offset..len, "\n")], None, cx);
2209 }
2210
2211 /// Applies a diff to the buffer. If the buffer has changed since the given diff was
2212 /// calculated, then adjust the diff to account for those changes, and discard any
2213 /// parts of the diff that conflict with those changes.
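    ///
    /// A hedged sketch of pairing this with [`Buffer::diff`] (illustrative only;
    /// assumes an `Entity<Buffer>` handle named `buffer` and an `&mut App` named `cx`):
    ///
    /// ```ignore
    /// let diff_task = buffer.update(cx, |buffer, cx| buffer.diff(new_text, cx));
    /// cx.spawn(async move |cx| {
    ///     let diff = diff_task.await;
    ///     buffer
    ///         .update(cx, |buffer, cx| buffer.apply_diff(diff, cx))
    ///         .ok();
    /// })
    /// .detach();
    /// ```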
2214 pub fn apply_diff(&mut self, diff: Diff, cx: &mut Context<Self>) -> Option<TransactionId> {
2215 let snapshot = self.snapshot();
2216 let mut edits_since = snapshot.edits_since::<usize>(&diff.base_version).peekable();
2217 let mut delta = 0;
2218 let adjusted_edits = diff.edits.into_iter().filter_map(|(range, new_text)| {
2219 while let Some(edit_since) = edits_since.peek() {
2220 // If the edit occurs after a diff hunk, then it does not
2221 // affect that hunk.
2222 if edit_since.old.start > range.end {
2223 break;
2224 }
2225 // If the edit precedes the diff hunk, then adjust the hunk
2226 // to reflect the edit.
2227 else if edit_since.old.end < range.start {
2228 delta += edit_since.new_len() as i64 - edit_since.old_len() as i64;
2229 edits_since.next();
2230 }
2231 // If the edit intersects a diff hunk, then discard that hunk.
2232 else {
2233 return None;
2234 }
2235 }
2236
2237 let start = (range.start as i64 + delta) as usize;
2238 let end = (range.end as i64 + delta) as usize;
2239 Some((start..end, new_text))
2240 });
2241
2242 self.start_transaction();
2243 self.text.set_line_ending(diff.line_ending);
2244 self.edit(adjusted_edits, None, cx);
2245 self.end_transaction(cx)
2246 }
2247
2248 pub fn has_unsaved_edits(&self) -> bool {
2249 let (last_version, has_unsaved_edits) = self.has_unsaved_edits.take();
2250
2251 if last_version == self.version {
2252 self.has_unsaved_edits
2253 .set((last_version, has_unsaved_edits));
2254 return has_unsaved_edits;
2255 }
2256
2257 let has_edits = self.has_edits_since(&self.saved_version);
2258 self.has_unsaved_edits
2259 .set((self.version.clone(), has_edits));
2260 has_edits
2261 }
2262
2263 /// Checks if the buffer has unsaved changes.
2264 pub fn is_dirty(&self) -> bool {
2265 if self.capability == Capability::ReadOnly {
2266 return false;
2267 }
2268 if self.has_conflict {
2269 return true;
2270 }
2271 match self.file.as_ref().map(|f| f.disk_state()) {
2272 Some(DiskState::New) | Some(DiskState::Deleted) => {
2273 !self.is_empty() && self.has_unsaved_edits()
2274 }
2275 _ => self.has_unsaved_edits(),
2276 }
2277 }
2278
2279 /// Marks the buffer as having a conflict regardless of current buffer state.
2280 pub fn set_conflict(&mut self) {
2281 self.has_conflict = true;
2282 }
2283
2284 /// Checks if the buffer and its file have both changed since the buffer
2285 /// was last saved or reloaded.
2286 pub fn has_conflict(&self) -> bool {
2287 if self.has_conflict {
2288 return true;
2289 }
2290 let Some(file) = self.file.as_ref() else {
2291 return false;
2292 };
2293 match file.disk_state() {
2294 DiskState::New => false,
2295 DiskState::Present { mtime } => match self.saved_mtime {
2296 Some(saved_mtime) => {
2297 mtime.bad_is_greater_than(saved_mtime) && self.has_unsaved_edits()
2298 }
2299 None => true,
2300 },
2301 DiskState::Deleted => false,
2302 DiskState::Historic { .. } => false,
2303 }
2304 }
2305
2306 /// Gets a [`Subscription`] that tracks all of the changes to the buffer's text.
2307 pub fn subscribe(&mut self) -> Subscription<usize> {
2308 self.text.subscribe()
2309 }
2310
2311 /// Adds a bit to the list of bits that are set when the buffer's text changes.
2312 ///
2313 /// This allows downstream code to check if the buffer's text has changed without
    /// waiting for an effect cycle, which would be required if using events.
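    ///
    /// A hedged sketch (illustrative, not a doctest; `Rc` and `Cell` come from
    /// the standard library): register a flag and poll it later instead of
    /// subscribing to events.
    ///
    /// ```ignore
    /// let changed = Rc::new(Cell::new(false));
    /// buffer.record_changes(Rc::downgrade(&changed));
    /// // ...edits happen elsewhere...
    /// if changed.replace(false) {
    ///     // The buffer's text changed since the flag was last cleared.
    /// }
    /// ```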
2315 pub fn record_changes(&mut self, bit: rc::Weak<Cell<bool>>) {
2316 if let Err(ix) = self
2317 .change_bits
2318 .binary_search_by_key(&rc::Weak::as_ptr(&bit), rc::Weak::as_ptr)
2319 {
2320 self.change_bits.insert(ix, bit);
2321 }
2322 }
2323
2324 /// Set the change bit for all "listeners".
2325 fn was_changed(&mut self) {
2326 self.change_bits.retain(|change_bit| {
2327 change_bit
2328 .upgrade()
2329 .inspect(|bit| {
2330 _ = bit.replace(true);
2331 })
2332 .is_some()
2333 });
2334 }
2335
2336 /// Starts a transaction, if one is not already in-progress. When undoing or
2337 /// redoing edits, all of the edits performed within a transaction are undone
2338 /// or redone together.
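    ///
    /// A minimal sketch of grouping two edits into a single undo step
    /// (illustrative; assumes `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "fn ")], None, cx);
    /// buffer.edit([(3..3, "main")], None, cx);
    /// buffer.end_transaction(cx);
    /// // A single undo now reverts both edits.
    /// buffer.undo(cx);
    /// ```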
2339 pub fn start_transaction(&mut self) -> Option<TransactionId> {
2340 self.start_transaction_at(Instant::now())
2341 }
2342
2343 /// Starts a transaction, providing the current time. Subsequent transactions
2344 /// that occur within a short period of time will be grouped together. This
2345 /// is controlled by the buffer's undo grouping duration.
2346 pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
2347 self.transaction_depth += 1;
2348 if self.was_dirty_before_starting_transaction.is_none() {
2349 self.was_dirty_before_starting_transaction = Some(self.is_dirty());
2350 }
2351 self.text.start_transaction_at(now)
2352 }
2353
2354 /// Terminates the current transaction, if this is the outermost transaction.
2355 pub fn end_transaction(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
2356 self.end_transaction_at(Instant::now(), cx)
2357 }
2358
2359 /// Terminates the current transaction, providing the current time. Subsequent transactions
2360 /// that occur within a short period of time will be grouped together. This
2361 /// is controlled by the buffer's undo grouping duration.
2362 pub fn end_transaction_at(
2363 &mut self,
2364 now: Instant,
2365 cx: &mut Context<Self>,
2366 ) -> Option<TransactionId> {
2367 assert!(self.transaction_depth > 0);
2368 self.transaction_depth -= 1;
2369 let was_dirty = if self.transaction_depth == 0 {
2370 self.was_dirty_before_starting_transaction.take().unwrap()
2371 } else {
2372 false
2373 };
2374 if let Some((transaction_id, start_version)) = self.text.end_transaction_at(now) {
2375 self.did_edit(&start_version, was_dirty, cx);
2376 Some(transaction_id)
2377 } else {
2378 None
2379 }
2380 }
2381
2382 /// Manually add a transaction to the buffer's undo history.
2383 pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
2384 self.text.push_transaction(transaction, now);
2385 }
2386
2387 /// Differs from `push_transaction` in that it does not clear the redo
2388 /// stack. Intended to be used to create a parent transaction to merge
2389 /// potential child transactions into.
2390 ///
2391 /// The caller is responsible for removing it from the undo history using
2392 /// `forget_transaction` if no edits are merged into it. Otherwise, if edits
2393 /// are merged into this transaction, the caller is responsible for ensuring
2394 /// the redo stack is cleared. The easiest way to ensure the redo stack is
2395 /// cleared is to create transactions with the usual `start_transaction` and
    /// `end_transaction` methods and merge the resulting transactions into
    /// the transaction created by this method.
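    ///
    /// A hedged sketch of that workflow (illustrative edit; assumes
    /// `cx: &mut Context<Buffer>`):
    ///
    /// ```ignore
    /// let parent = buffer.push_empty_transaction(Instant::now());
    /// buffer.start_transaction();
    /// buffer.edit([(0..0, "hello")], None, cx);
    /// if let Some(child) = buffer.end_transaction(cx) {
    ///     buffer.merge_transactions(child, parent);
    /// } else {
    ///     buffer.forget_transaction(parent);
    /// }
    /// ```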
2398 pub fn push_empty_transaction(&mut self, now: Instant) -> TransactionId {
2399 self.text.push_empty_transaction(now)
2400 }
2401
2402 /// Prevent the last transaction from being grouped with any subsequent transactions,
    /// even if they occur within the buffer's undo grouping duration.
2404 pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
2405 self.text.finalize_last_transaction()
2406 }
2407
2408 /// Manually group all changes since a given transaction.
2409 pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
2410 self.text.group_until_transaction(transaction_id);
2411 }
2412
2413 /// Manually remove a transaction from the buffer's undo history
2414 pub fn forget_transaction(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
2415 self.text.forget_transaction(transaction_id)
2416 }
2417
2418 /// Retrieve a transaction from the buffer's undo history
2419 pub fn get_transaction(&self, transaction_id: TransactionId) -> Option<&Transaction> {
2420 self.text.get_transaction(transaction_id)
2421 }
2422
2423 /// Manually merge two transactions in the buffer's undo history.
2424 pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
2425 self.text.merge_transactions(transaction, destination);
2426 }
2427
2428 /// Waits for the buffer to receive operations with the given timestamps.
2429 pub fn wait_for_edits<It: IntoIterator<Item = clock::Lamport>>(
2430 &mut self,
2431 edit_ids: It,
2432 ) -> impl Future<Output = Result<()>> + use<It> {
2433 self.text.wait_for_edits(edit_ids)
2434 }
2435
2436 /// Waits for the buffer to receive the operations necessary for resolving the given anchors.
2437 pub fn wait_for_anchors<It: IntoIterator<Item = Anchor>>(
2438 &mut self,
2439 anchors: It,
2440 ) -> impl 'static + Future<Output = Result<()>> + use<It> {
2441 self.text.wait_for_anchors(anchors)
2442 }
2443
2444 /// Waits for the buffer to receive operations up to the given version.
2445 pub fn wait_for_version(
2446 &mut self,
2447 version: clock::Global,
2448 ) -> impl Future<Output = Result<()>> + use<> {
2449 self.text.wait_for_version(version)
2450 }
2451
    /// Forces all futures returned by [`Buffer::wait_for_version`], [`Buffer::wait_for_edits`], or
    /// [`Buffer::wait_for_anchors`] to resolve with an error.
2454 pub fn give_up_waiting(&mut self) {
2455 self.text.give_up_waiting();
2456 }
2457
2458 pub fn wait_for_autoindent_applied(&mut self) -> Option<oneshot::Receiver<()>> {
2459 let mut rx = None;
2460 if !self.autoindent_requests.is_empty() {
2461 let channel = oneshot::channel();
2462 self.wait_for_autoindent_txs.push(channel.0);
2463 rx = Some(channel.1);
2464 }
2465 rx
2466 }
2467
2468 /// Stores a set of selections that should be broadcasted to all of the buffer's replicas.
2469 pub fn set_active_selections(
2470 &mut self,
2471 selections: Arc<[Selection<Anchor>]>,
2472 line_mode: bool,
2473 cursor_shape: CursorShape,
2474 cx: &mut Context<Self>,
2475 ) {
2476 let lamport_timestamp = self.text.lamport_clock.tick();
2477 self.remote_selections.insert(
2478 self.text.replica_id(),
2479 SelectionSet {
2480 selections: selections.clone(),
2481 lamport_timestamp,
2482 line_mode,
2483 cursor_shape,
2484 },
2485 );
2486 self.send_operation(
2487 Operation::UpdateSelections {
2488 selections,
2489 line_mode,
2490 lamport_timestamp,
2491 cursor_shape,
2492 },
2493 true,
2494 cx,
2495 );
2496 self.non_text_state_update_count += 1;
2497 cx.notify();
2498 }
2499
2500 /// Clears the selections, so that other replicas of the buffer do not see any selections for
2501 /// this replica.
2502 pub fn remove_active_selections(&mut self, cx: &mut Context<Self>) {
2503 if self
2504 .remote_selections
2505 .get(&self.text.replica_id())
2506 .is_none_or(|set| !set.selections.is_empty())
2507 {
2508 self.set_active_selections(Arc::default(), false, Default::default(), cx);
2509 }
2510 }
2511
2512 pub fn set_agent_selections(
2513 &mut self,
2514 selections: Arc<[Selection<Anchor>]>,
2515 line_mode: bool,
2516 cursor_shape: CursorShape,
2517 cx: &mut Context<Self>,
2518 ) {
2519 let lamport_timestamp = self.text.lamport_clock.tick();
2520 self.remote_selections.insert(
2521 ReplicaId::AGENT,
2522 SelectionSet {
2523 selections,
2524 lamport_timestamp,
2525 line_mode,
2526 cursor_shape,
2527 },
2528 );
2529 self.non_text_state_update_count += 1;
2530 cx.notify();
2531 }
2532
2533 pub fn remove_agent_selections(&mut self, cx: &mut Context<Self>) {
2534 self.set_agent_selections(Arc::default(), false, Default::default(), cx);
2535 }
2536
2537 /// Replaces the buffer's entire text.
2538 pub fn set_text<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2539 where
2540 T: Into<Arc<str>>,
2541 {
2542 self.autoindent_requests.clear();
2543 self.edit([(0..self.len(), text)], None, cx)
2544 }
2545
2546 /// Appends the given text to the end of the buffer.
2547 pub fn append<T>(&mut self, text: T, cx: &mut Context<Self>) -> Option<clock::Lamport>
2548 where
2549 T: Into<Arc<str>>,
2550 {
2551 self.edit([(self.len()..self.len(), text)], None, cx)
2552 }
2553
2554 /// Applies the given edits to the buffer. Each edit is specified as a range of text to
2555 /// delete, and a string of text to insert at that location. Adjacent edits are coalesced.
2556 ///
2557 /// If an [`AutoindentMode`] is provided, then the buffer will enqueue an auto-indent
2558 /// request for the edited ranges, which will be processed when the buffer finishes
2559 /// parsing.
2560 ///
2561 /// Parsing takes place at the end of a transaction, and may compute synchronously
2562 /// or asynchronously, depending on the changes.
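    ///
    /// A hedged sketch (illustrative offsets; `AutoindentMode::EachLine` re-indents
    /// each edited line once the buffer has parsed):
    ///
    /// ```ignore
    /// buffer.edit(
    ///     [(0..3, "let"), (10..10, "\n")],
    ///     Some(AutoindentMode::EachLine),
    ///     cx,
    /// );
    /// ```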
2563 pub fn edit<I, S, T>(
2564 &mut self,
2565 edits_iter: I,
2566 autoindent_mode: Option<AutoindentMode>,
2567 cx: &mut Context<Self>,
2568 ) -> Option<clock::Lamport>
2569 where
2570 I: IntoIterator<Item = (Range<S>, T)>,
2571 S: ToOffset,
2572 T: Into<Arc<str>>,
2573 {
2574 self.edit_internal(edits_iter, autoindent_mode, true, cx)
2575 }
2576
2577 /// Like [`edit`](Self::edit), but does not coalesce adjacent edits.
2578 pub fn edit_non_coalesce<I, S, T>(
2579 &mut self,
2580 edits_iter: I,
2581 autoindent_mode: Option<AutoindentMode>,
2582 cx: &mut Context<Self>,
2583 ) -> Option<clock::Lamport>
2584 where
2585 I: IntoIterator<Item = (Range<S>, T)>,
2586 S: ToOffset,
2587 T: Into<Arc<str>>,
2588 {
2589 self.edit_internal(edits_iter, autoindent_mode, false, cx)
2590 }
2591
2592 fn edit_internal<I, S, T>(
2593 &mut self,
2594 edits_iter: I,
2595 autoindent_mode: Option<AutoindentMode>,
2596 coalesce_adjacent: bool,
2597 cx: &mut Context<Self>,
2598 ) -> Option<clock::Lamport>
2599 where
2600 I: IntoIterator<Item = (Range<S>, T)>,
2601 S: ToOffset,
2602 T: Into<Arc<str>>,
2603 {
2604 // Skip invalid edits and coalesce contiguous ones.
2605 let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
2606
2607 for (range, new_text) in edits_iter {
2608 let mut range = range.start.to_offset(self)..range.end.to_offset(self);
2609
2610 if range.start > range.end {
2611 mem::swap(&mut range.start, &mut range.end);
2612 }
2613 let new_text = new_text.into();
2614 if !new_text.is_empty() || !range.is_empty() {
2615 let prev_edit = edits.last_mut();
2616 let should_coalesce = prev_edit.as_ref().is_some_and(|(prev_range, _)| {
2617 if coalesce_adjacent {
2618 prev_range.end >= range.start
2619 } else {
2620 prev_range.end > range.start
2621 }
2622 });
2623
2624 if let Some((prev_range, prev_text)) = prev_edit
2625 && should_coalesce
2626 {
2627 prev_range.end = cmp::max(prev_range.end, range.end);
2628 *prev_text = format!("{prev_text}{new_text}").into();
2629 } else {
2630 edits.push((range, new_text));
2631 }
2632 }
2633 }
2634 if edits.is_empty() {
2635 return None;
2636 }
2637
2638 self.start_transaction();
2639 self.pending_autoindent.take();
2640 let autoindent_request = autoindent_mode
2641 .and_then(|mode| self.language.as_ref().map(|_| (self.snapshot(), mode)));
2642
2643 let edit_operation = self.text.edit(edits.iter().cloned());
2644 let edit_id = edit_operation.timestamp();
2645
2646 if let Some((before_edit, mode)) = autoindent_request {
2647 let mut delta = 0isize;
2648 let mut previous_setting = None;
2649 let entries: Vec<_> = edits
2650 .into_iter()
2651 .enumerate()
2652 .zip(&edit_operation.as_edit().unwrap().new_text)
2653 .filter(|((_, (range, _)), _)| {
2654 let language = before_edit.language_at(range.start);
2655 let language_id = language.map(|l| l.id());
2656 if let Some((cached_language_id, auto_indent)) = previous_setting
2657 && cached_language_id == language_id
2658 {
2659 auto_indent
2660 } else {
2661 // The auto-indent setting is not present in editorconfigs, hence
2662 // we can avoid passing the file here.
2663 let auto_indent =
2664 language_settings(language.map(|l| l.name()), None, cx).auto_indent;
2665 previous_setting = Some((language_id, auto_indent));
2666 auto_indent
2667 }
2668 })
2669 .map(|((ix, (range, _)), new_text)| {
2670 let new_text_length = new_text.len();
2671 let old_start = range.start.to_point(&before_edit);
2672 let new_start = (delta + range.start as isize) as usize;
2673 let range_len = range.end - range.start;
2674 delta += new_text_length as isize - range_len as isize;
2675
2676 // Decide what range of the insertion to auto-indent, and whether
2677 // the first line of the insertion should be considered a newly-inserted line
2678 // or an edit to an existing line.
2679 let mut range_of_insertion_to_indent = 0..new_text_length;
2680 let mut first_line_is_new = true;
2681
2682 let old_line_start = before_edit.indent_size_for_line(old_start.row).len;
2683 let old_line_end = before_edit.line_len(old_start.row);
2684
2685 if old_start.column > old_line_start {
2686 first_line_is_new = false;
2687 }
2688
2689 if !new_text.contains('\n')
2690 && (old_start.column + (range_len as u32) < old_line_end
2691 || old_line_end == old_line_start)
2692 {
2693 first_line_is_new = false;
2694 }
2695
2696 // When inserting text starting with a newline, avoid auto-indenting the
2697 // previous line.
2698 if new_text.starts_with('\n') {
2699 range_of_insertion_to_indent.start += 1;
2700 first_line_is_new = true;
2701 }
2702
2703 let mut original_indent_column = None;
2704 if let AutoindentMode::Block {
2705 original_indent_columns,
2706 } = &mode
2707 {
2708 original_indent_column = Some(if new_text.starts_with('\n') {
2709 indent_size_for_text(
2710 new_text[range_of_insertion_to_indent.clone()].chars(),
2711 )
2712 .len
2713 } else {
2714 original_indent_columns
2715 .get(ix)
2716 .copied()
2717 .flatten()
2718 .unwrap_or_else(|| {
2719 indent_size_for_text(
2720 new_text[range_of_insertion_to_indent.clone()].chars(),
2721 )
2722 .len
2723 })
2724 });
2725
2726 // Avoid auto-indenting the line after the edit.
2727 if new_text[range_of_insertion_to_indent.clone()].ends_with('\n') {
2728 range_of_insertion_to_indent.end -= 1;
2729 }
2730 }
2731
2732 AutoindentRequestEntry {
2733 original_indent_column,
2734 old_row: if first_line_is_new {
2735 None
2736 } else {
2737 Some(old_start.row)
2738 },
2739 indent_size: before_edit.language_indent_size_at(range.start, cx),
2740 range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
2741 ..self.anchor_after(new_start + range_of_insertion_to_indent.end),
2742 }
2743 })
2744 .collect();
2745
2746 if !entries.is_empty() {
2747 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2748 before_edit,
2749 entries,
2750 is_block_mode: matches!(mode, AutoindentMode::Block { .. }),
2751 ignore_empty_lines: false,
2752 }));
2753 }
2754 }
2755
2756 self.end_transaction(cx);
2757 self.send_operation(Operation::Buffer(edit_operation), true, cx);
2758 Some(edit_id)
2759 }
2760
2761 fn did_edit(&mut self, old_version: &clock::Global, was_dirty: bool, cx: &mut Context<Self>) {
2762 self.was_changed();
2763
2764 if self.edits_since::<usize>(old_version).next().is_none() {
2765 return;
2766 }
2767
2768 self.reparse(cx, true);
2769 cx.emit(BufferEvent::Edited);
2770 if was_dirty != self.is_dirty() {
2771 cx.emit(BufferEvent::DirtyChanged);
2772 }
2773 cx.notify();
2774 }
2775
2776 pub fn autoindent_ranges<I, T>(&mut self, ranges: I, cx: &mut Context<Self>)
2777 where
2778 I: IntoIterator<Item = Range<T>>,
2779 T: ToOffset + Copy,
2780 {
2781 let before_edit = self.snapshot();
2782 let entries = ranges
2783 .into_iter()
2784 .map(|range| AutoindentRequestEntry {
2785 range: before_edit.anchor_before(range.start)..before_edit.anchor_after(range.end),
2786 old_row: None,
2787 indent_size: before_edit.language_indent_size_at(range.start, cx),
2788 original_indent_column: None,
2789 })
2790 .collect();
2791 self.autoindent_requests.push(Arc::new(AutoindentRequest {
2792 before_edit,
2793 entries,
2794 is_block_mode: false,
2795 ignore_empty_lines: true,
2796 }));
2797 self.request_autoindent(cx, Duration::from_micros(300));
2798 }
2799
2800 // Inserts newlines at the given position to create an empty line, returning the start of the new line.
2801 // You can also request the insertion of empty lines above and below the line starting at the returned point.
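    //
    // A hedged sketch (illustrative position): open a blank line at row 4, column 2,
    // requesting padding blank lines above and below:
    //
    //     let new_line_start = buffer.insert_empty_line(Point::new(4, 2), true, true, cx);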
2802 pub fn insert_empty_line(
2803 &mut self,
2804 position: impl ToPoint,
2805 space_above: bool,
2806 space_below: bool,
2807 cx: &mut Context<Self>,
2808 ) -> Point {
2809 let mut position = position.to_point(self);
2810
2811 self.start_transaction();
2812
2813 self.edit(
2814 [(position..position, "\n")],
2815 Some(AutoindentMode::EachLine),
2816 cx,
2817 );
2818
2819 if position.column > 0 {
2820 position += Point::new(1, 0);
2821 }
2822
2823 if !self.is_line_blank(position.row) {
2824 self.edit(
2825 [(position..position, "\n")],
2826 Some(AutoindentMode::EachLine),
2827 cx,
2828 );
2829 }
2830
2831 if space_above && position.row > 0 && !self.is_line_blank(position.row - 1) {
2832 self.edit(
2833 [(position..position, "\n")],
2834 Some(AutoindentMode::EachLine),
2835 cx,
2836 );
2837 position.row += 1;
2838 }
2839
2840 if space_below
2841 && (position.row == self.max_point().row || !self.is_line_blank(position.row + 1))
2842 {
2843 self.edit(
2844 [(position..position, "\n")],
2845 Some(AutoindentMode::EachLine),
2846 cx,
2847 );
2848 }
2849
2850 self.end_transaction(cx);
2851
2852 position
2853 }
2854
2855 /// Applies the given remote operations to the buffer.
2856 pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I, cx: &mut Context<Self>) {
2857 self.pending_autoindent.take();
2858 let was_dirty = self.is_dirty();
2859 let old_version = self.version.clone();
2860 let mut deferred_ops = Vec::new();
2861 let buffer_ops = ops
2862 .into_iter()
2863 .filter_map(|op| match op {
2864 Operation::Buffer(op) => Some(op),
2865 _ => {
2866 if self.can_apply_op(&op) {
2867 self.apply_op(op, cx);
2868 } else {
2869 deferred_ops.push(op);
2870 }
2871 None
2872 }
2873 })
2874 .collect::<Vec<_>>();
2875 for operation in buffer_ops.iter() {
2876 self.send_operation(Operation::Buffer(operation.clone()), false, cx);
2877 }
2878 self.text.apply_ops(buffer_ops);
2879 self.deferred_ops.insert(deferred_ops);
2880 self.flush_deferred_ops(cx);
2881 self.did_edit(&old_version, was_dirty, cx);
2882 // Notify independently of whether the buffer was edited as the operations could include a
2883 // selection update.
2884 cx.notify();
2885 }
2886
2887 fn flush_deferred_ops(&mut self, cx: &mut Context<Self>) {
2888 let mut deferred_ops = Vec::new();
2889 for op in self.deferred_ops.drain().iter().cloned() {
2890 if self.can_apply_op(&op) {
2891 self.apply_op(op, cx);
2892 } else {
2893 deferred_ops.push(op);
2894 }
2895 }
2896 self.deferred_ops.insert(deferred_ops);
2897 }
2898
2899 pub fn has_deferred_ops(&self) -> bool {
2900 !self.deferred_ops.is_empty() || self.text.has_deferred_ops()
2901 }
2902
2903 fn can_apply_op(&self, operation: &Operation) -> bool {
2904 match operation {
2905 Operation::Buffer(_) => {
2906 unreachable!("buffer operations should never be applied at this layer")
2907 }
2908 Operation::UpdateDiagnostics {
2909 diagnostics: diagnostic_set,
2910 ..
2911 } => diagnostic_set.iter().all(|diagnostic| {
2912 self.text.can_resolve(&diagnostic.range.start)
2913 && self.text.can_resolve(&diagnostic.range.end)
2914 }),
2915 Operation::UpdateSelections { selections, .. } => selections
2916 .iter()
2917 .all(|s| self.can_resolve(&s.start) && self.can_resolve(&s.end)),
2918 Operation::UpdateCompletionTriggers { .. } | Operation::UpdateLineEnding { .. } => true,
2919 }
2920 }
2921
2922 fn apply_op(&mut self, operation: Operation, cx: &mut Context<Self>) {
2923 match operation {
2924 Operation::Buffer(_) => {
2925 unreachable!("buffer operations should never be applied at this layer")
2926 }
2927 Operation::UpdateDiagnostics {
2928 server_id,
2929 diagnostics: diagnostic_set,
2930 lamport_timestamp,
2931 } => {
2932 let snapshot = self.snapshot();
2933 self.apply_diagnostic_update(
2934 server_id,
2935 DiagnosticSet::from_sorted_entries(diagnostic_set.iter().cloned(), &snapshot),
2936 lamport_timestamp,
2937 cx,
2938 );
2939 }
2940 Operation::UpdateSelections {
2941 selections,
2942 lamport_timestamp,
2943 line_mode,
2944 cursor_shape,
2945 } => {
2946 if let Some(set) = self.remote_selections.get(&lamport_timestamp.replica_id)
2947 && set.lamport_timestamp > lamport_timestamp
2948 {
2949 return;
2950 }
2951
2952 self.remote_selections.insert(
2953 lamport_timestamp.replica_id,
2954 SelectionSet {
2955 selections,
2956 lamport_timestamp,
2957 line_mode,
2958 cursor_shape,
2959 },
2960 );
2961 self.text.lamport_clock.observe(lamport_timestamp);
2962 self.non_text_state_update_count += 1;
2963 }
2964 Operation::UpdateCompletionTriggers {
2965 triggers,
2966 lamport_timestamp,
2967 server_id,
2968 } => {
2969 if triggers.is_empty() {
2970 self.completion_triggers_per_language_server
2971 .remove(&server_id);
2972 self.completion_triggers = self
2973 .completion_triggers_per_language_server
2974 .values()
2975 .flat_map(|triggers| triggers.iter().cloned())
2976 .collect();
2977 } else {
2978 self.completion_triggers_per_language_server
2979 .insert(server_id, triggers.iter().cloned().collect());
2980 self.completion_triggers.extend(triggers);
2981 }
2982 self.text.lamport_clock.observe(lamport_timestamp);
2983 }
2984 Operation::UpdateLineEnding {
2985 line_ending,
2986 lamport_timestamp,
2987 } => {
2988 self.text.set_line_ending(line_ending);
2989 self.text.lamport_clock.observe(lamport_timestamp);
2990 }
2991 }
2992 }
2993
2994 fn apply_diagnostic_update(
2995 &mut self,
2996 server_id: LanguageServerId,
2997 diagnostics: DiagnosticSet,
2998 lamport_timestamp: clock::Lamport,
2999 cx: &mut Context<Self>,
3000 ) {
3001 if lamport_timestamp > self.diagnostics_timestamp {
3002 let ix = self.diagnostics.binary_search_by_key(&server_id, |e| e.0);
3003 if diagnostics.is_empty() {
3004 if let Ok(ix) = ix {
3005 self.diagnostics.remove(ix);
3006 }
3007 } else {
3008 match ix {
3009 Err(ix) => self.diagnostics.insert(ix, (server_id, diagnostics)),
3010 Ok(ix) => self.diagnostics[ix].1 = diagnostics,
3011 };
3012 }
3013 self.diagnostics_timestamp = lamport_timestamp;
3014 self.non_text_state_update_count += 1;
3015 self.text.lamport_clock.observe(lamport_timestamp);
3016 cx.notify();
3017 cx.emit(BufferEvent::DiagnosticsUpdated);
3018 }
3019 }
3020
3021 fn send_operation(&mut self, operation: Operation, is_local: bool, cx: &mut Context<Self>) {
3022 self.was_changed();
3023 cx.emit(BufferEvent::Operation {
3024 operation,
3025 is_local,
3026 });
3027 }
3028
3029 /// Removes the selections for a given peer.
3030 pub fn remove_peer(&mut self, replica_id: ReplicaId, cx: &mut Context<Self>) {
3031 self.remote_selections.remove(&replica_id);
3032 cx.notify();
3033 }
3034
3035 /// Undoes the most recent transaction.
3036 pub fn undo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3037 let was_dirty = self.is_dirty();
3038 let old_version = self.version.clone();
3039
3040 if let Some((transaction_id, operation)) = self.text.undo() {
3041 self.send_operation(Operation::Buffer(operation), true, cx);
3042 self.did_edit(&old_version, was_dirty, cx);
3043 Some(transaction_id)
3044 } else {
3045 None
3046 }
3047 }
3048
3049 /// Manually undoes a specific transaction in the buffer's undo history.
3050 pub fn undo_transaction(
3051 &mut self,
3052 transaction_id: TransactionId,
3053 cx: &mut Context<Self>,
3054 ) -> bool {
3055 let was_dirty = self.is_dirty();
3056 let old_version = self.version.clone();
3057 if let Some(operation) = self.text.undo_transaction(transaction_id) {
3058 self.send_operation(Operation::Buffer(operation), true, cx);
3059 self.did_edit(&old_version, was_dirty, cx);
3060 true
3061 } else {
3062 false
3063 }
3064 }
3065
3066 /// Manually undoes all changes after a given transaction in the buffer's undo history.
3067 pub fn undo_to_transaction(
3068 &mut self,
3069 transaction_id: TransactionId,
3070 cx: &mut Context<Self>,
3071 ) -> bool {
3072 let was_dirty = self.is_dirty();
3073 let old_version = self.version.clone();
3074
3075 let operations = self.text.undo_to_transaction(transaction_id);
3076 let undone = !operations.is_empty();
3077 for operation in operations {
3078 self.send_operation(Operation::Buffer(operation), true, cx);
3079 }
3080 if undone {
3081 self.did_edit(&old_version, was_dirty, cx)
3082 }
3083 undone
3084 }
3085
3086 pub fn undo_operations(&mut self, counts: HashMap<Lamport, u32>, cx: &mut Context<Buffer>) {
3087 let was_dirty = self.is_dirty();
3088 let operation = self.text.undo_operations(counts);
3089 let old_version = self.version.clone();
3090 self.send_operation(Operation::Buffer(operation), true, cx);
3091 self.did_edit(&old_version, was_dirty, cx);
3092 }
3093
    /// Redoes the most recent transaction.
3095 pub fn redo(&mut self, cx: &mut Context<Self>) -> Option<TransactionId> {
3096 let was_dirty = self.is_dirty();
3097 let old_version = self.version.clone();
3098
3099 if let Some((transaction_id, operation)) = self.text.redo() {
3100 self.send_operation(Operation::Buffer(operation), true, cx);
3101 self.did_edit(&old_version, was_dirty, cx);
3102 Some(transaction_id)
3103 } else {
3104 None
3105 }
3106 }
3107
    /// Manually redoes all changes up to a given transaction in the buffer's redo history.
3109 pub fn redo_to_transaction(
3110 &mut self,
3111 transaction_id: TransactionId,
3112 cx: &mut Context<Self>,
3113 ) -> bool {
3114 let was_dirty = self.is_dirty();
3115 let old_version = self.version.clone();
3116
3117 let operations = self.text.redo_to_transaction(transaction_id);
3118 let redone = !operations.is_empty();
3119 for operation in operations {
3120 self.send_operation(Operation::Buffer(operation), true, cx);
3121 }
3122 if redone {
3123 self.did_edit(&old_version, was_dirty, cx)
3124 }
3125 redone
3126 }
3127
    /// Overrides the current completion triggers for the given language server
    /// with the provided triggers.
3129 pub fn set_completion_triggers(
3130 &mut self,
3131 server_id: LanguageServerId,
3132 triggers: BTreeSet<String>,
3133 cx: &mut Context<Self>,
3134 ) {
3135 self.completion_triggers_timestamp = self.text.lamport_clock.tick();
3136 if triggers.is_empty() {
3137 self.completion_triggers_per_language_server
3138 .remove(&server_id);
3139 self.completion_triggers = self
3140 .completion_triggers_per_language_server
3141 .values()
3142 .flat_map(|triggers| triggers.iter().cloned())
3143 .collect();
3144 } else {
3145 self.completion_triggers_per_language_server
3146 .insert(server_id, triggers.clone());
3147 self.completion_triggers.extend(triggers.iter().cloned());
3148 }
3149 self.send_operation(
3150 Operation::UpdateCompletionTriggers {
3151 triggers: triggers.into_iter().collect(),
3152 lamport_timestamp: self.completion_triggers_timestamp,
3153 server_id,
3154 },
3155 true,
3156 cx,
3157 );
3158 cx.notify();
3159 }
3160
3161 /// Returns a list of strings which trigger a completion menu for this language.
    /// Usually this is driven by an LSP server, which returns a list of trigger
    /// characters for completions.
3163 pub fn completion_triggers(&self) -> &BTreeSet<String> {
3164 &self.completion_triggers
3165 }
3166
3167 /// Call this directly after performing edits to prevent the preview tab
3168 /// from being dismissed by those edits. It causes `should_dismiss_preview`
3169 /// to return false until there are additional edits.
3170 pub fn refresh_preview(&mut self) {
3171 self.preview_version = self.version.clone();
3172 }
3173
3174 /// Whether we should preserve the preview status of a tab containing this buffer.
3175 pub fn preserve_preview(&self) -> bool {
3176 !self.has_edits_since(&self.preview_version)
3177 }
3178}
3179
3180#[doc(hidden)]
3181#[cfg(any(test, feature = "test-support"))]
3182impl Buffer {
3183 pub fn edit_via_marked_text(
3184 &mut self,
3185 marked_string: &str,
3186 autoindent_mode: Option<AutoindentMode>,
3187 cx: &mut Context<Self>,
3188 ) {
3189 let edits = self.edits_for_marked_text(marked_string);
3190 self.edit(edits, autoindent_mode, cx);
3191 }
3192
3193 pub fn set_group_interval(&mut self, group_interval: Duration) {
3194 self.text.set_group_interval(group_interval);
3195 }
3196
3197 pub fn randomly_edit<T>(&mut self, rng: &mut T, old_range_count: usize, cx: &mut Context<Self>)
3198 where
3199 T: rand::Rng,
3200 {
3201 let mut edits: Vec<(Range<usize>, String)> = Vec::new();
3202 let mut last_end = None;
3203 for _ in 0..old_range_count {
3204 if last_end.is_some_and(|last_end| last_end >= self.len()) {
3205 break;
3206 }
3207
3208 let new_start = last_end.map_or(0, |last_end| last_end + 1);
3209 let mut range = self.random_byte_range(new_start, rng);
3210 if rng.random_bool(0.2) {
3211 mem::swap(&mut range.start, &mut range.end);
3212 }
3213 last_end = Some(range.end);
3214
3215 let new_text_len = rng.random_range(0..10);
3216 let mut new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
3217 new_text = new_text.to_uppercase();
3218
3219 edits.push((range, new_text));
3220 }
3221 log::info!("mutating buffer {:?} with {:?}", self.replica_id(), edits);
3222 self.edit(edits, None, cx);
3223 }
3224
3225 pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng, cx: &mut Context<Self>) {
3226 let was_dirty = self.is_dirty();
3227 let old_version = self.version.clone();
3228
3229 let ops = self.text.randomly_undo_redo(rng);
3230 if !ops.is_empty() {
3231 for op in ops {
3232 self.send_operation(Operation::Buffer(op), true, cx);
3233 self.did_edit(&old_version, was_dirty, cx);
3234 }
3235 }
3236 }
3237}
3238
3239impl EventEmitter<BufferEvent> for Buffer {}
3240
3241impl Deref for Buffer {
3242 type Target = TextBuffer;
3243
3244 fn deref(&self) -> &Self::Target {
3245 &self.text
3246 }
3247}
3248
3249impl BufferSnapshot {
3250 /// Returns [`IndentSize`] for a given line that respects user settings and
3251 /// language preferences.
3252 pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
3253 indent_size_for_line(self, row)
3254 }
3255
3256 /// Returns [`IndentSize`] for a given position that respects user settings
3257 /// and language preferences.
3258 pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &App) -> IndentSize {
3259 let settings = language_settings(
3260 self.language_at(position).map(|l| l.name()),
3261 self.file(),
3262 cx,
3263 );
3264 if settings.hard_tabs {
3265 IndentSize::tab()
3266 } else {
3267 IndentSize::spaces(settings.tab_size.get())
3268 }
3269 }
3270
3271 /// Retrieve the suggested indent size for all of the given rows. The unit of indentation
3272 /// is passed in as `single_indent_size`.
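    ///
    /// A hedged sketch (illustrative rows and indent unit):
    ///
    /// ```ignore
    /// let indents = snapshot.suggested_indents(2..6, IndentSize::spaces(4));
    /// for (row, indent) in indents {
    ///     println!("row {row}: indent by {} columns", indent.len);
    /// }
    /// ```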
3273 pub fn suggested_indents(
3274 &self,
3275 rows: impl Iterator<Item = u32>,
3276 single_indent_size: IndentSize,
3277 ) -> BTreeMap<u32, IndentSize> {
3278 let mut result = BTreeMap::new();
3279
3280 for row_range in contiguous_ranges(rows, 10) {
3281 let suggestions = match self.suggest_autoindents(row_range.clone()) {
3282 Some(suggestions) => suggestions,
3283 _ => break,
3284 };
3285
3286 for (row, suggestion) in row_range.zip(suggestions) {
3287 let indent_size = if let Some(suggestion) = suggestion {
3288 result
3289 .get(&suggestion.basis_row)
3290 .copied()
3291 .unwrap_or_else(|| self.indent_size_for_line(suggestion.basis_row))
3292 .with_delta(suggestion.delta, single_indent_size)
3293 } else {
3294 self.indent_size_for_line(row)
3295 };
3296
3297 result.insert(row, indent_size);
3298 }
3299 }
3300
3301 result
3302 }
3303
3304 fn suggest_autoindents(
3305 &self,
3306 row_range: Range<u32>,
3307 ) -> Option<impl Iterator<Item = Option<IndentSuggestion>> + '_> {
3308 let config = &self.language.as_ref()?.config;
3309 let prev_non_blank_row = self.prev_non_blank_row(row_range.start);
3310
3311 #[derive(Debug, Clone)]
3312 struct StartPosition {
3313 start: Point,
3314 suffix: SharedString,
3315 language: Arc<Language>,
3316 }
3317
3318 // Find the suggested indentation ranges based on the syntax tree.
3319 let start = Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0);
3320 let end = Point::new(row_range.end, 0);
3321 let range = (start..end).to_offset(&self.text);
3322 let mut matches = self.syntax.matches_with_options(
3323 range.clone(),
3324 &self.text,
3325 TreeSitterOptions {
3326 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
3327 max_start_depth: None,
3328 },
3329 |grammar| Some(&grammar.indents_config.as_ref()?.query),
3330 );
3331 let indent_configs = matches
3332 .grammars()
3333 .iter()
3334 .map(|grammar| grammar.indents_config.as_ref().unwrap())
3335 .collect::<Vec<_>>();
3336
3337 let mut indent_ranges = Vec::<Range<Point>>::new();
3338 let mut start_positions = Vec::<StartPosition>::new();
3339 let mut outdent_positions = Vec::<Point>::new();
3340 while let Some(mat) = matches.peek() {
3341 let mut start: Option<Point> = None;
3342 let mut end: Option<Point> = None;
3343
3344 let config = indent_configs[mat.grammar_index];
3345 for capture in mat.captures {
3346 if capture.index == config.indent_capture_ix {
3347 start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
3348 end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
3349 } else if Some(capture.index) == config.start_capture_ix {
3350 start = Some(Point::from_ts_point(capture.node.end_position()));
3351 } else if Some(capture.index) == config.end_capture_ix {
3352 end = Some(Point::from_ts_point(capture.node.start_position()));
3353 } else if Some(capture.index) == config.outdent_capture_ix {
3354 outdent_positions.push(Point::from_ts_point(capture.node.start_position()));
3355 } else if let Some(suffix) = config.suffixed_start_captures.get(&capture.index) {
3356 start_positions.push(StartPosition {
3357 start: Point::from_ts_point(capture.node.start_position()),
3358 suffix: suffix.clone(),
3359 language: mat.language.clone(),
3360 });
3361 }
3362 }
3363
3364 matches.advance();
3365 if let Some((start, end)) = start.zip(end) {
3366 if start.row == end.row {
3367 continue;
3368 }
3369 let range = start..end;
3370 match indent_ranges.binary_search_by_key(&range.start, |r| r.start) {
3371 Err(ix) => indent_ranges.insert(ix, range),
3372 Ok(ix) => {
3373 let prev_range = &mut indent_ranges[ix];
3374 prev_range.end = prev_range.end.max(range.end);
3375 }
3376 }
3377 }
3378 }
3379
3380 let mut error_ranges = Vec::<Range<Point>>::new();
3381 let mut matches = self
3382 .syntax
3383 .matches(range, &self.text, |grammar| grammar.error_query.as_ref());
3384 while let Some(mat) = matches.peek() {
3385 let node = mat.captures[0].node;
3386 let start = Point::from_ts_point(node.start_position());
3387 let end = Point::from_ts_point(node.end_position());
3388 let range = start..end;
3389 let ix = match error_ranges.binary_search_by_key(&range.start, |r| r.start) {
3390 Ok(ix) | Err(ix) => ix,
3391 };
3392 let mut end_ix = ix;
3393 while let Some(existing_range) = error_ranges.get(end_ix) {
3394 if existing_range.end < end {
3395 end_ix += 1;
3396 } else {
3397 break;
3398 }
3399 }
3400 error_ranges.splice(ix..end_ix, [range]);
3401 matches.advance();
3402 }
3403
3404 outdent_positions.sort();
3405 for outdent_position in outdent_positions {
            // Find the innermost indent range containing this outdent position,
            // and truncate its end to the outdent position.
3408 if let Some(range_to_truncate) = indent_ranges
3409 .iter_mut()
3410 .rfind(|indent_range| indent_range.contains(&outdent_position))
3411 {
3412 range_to_truncate.end = outdent_position;
3413 }
3414 }
3415
3416 start_positions.sort_by_key(|b| b.start);
3417
        // Find the suggested indentation increases and decreases based on regexes.
3419 let mut regex_outdent_map = HashMap::default();
3420 let mut last_seen_suffix: HashMap<String, Vec<StartPosition>> = HashMap::default();
3421 let mut start_positions_iter = start_positions.iter().peekable();
3422
3423 let mut indent_change_rows = Vec::<(u32, Ordering)>::new();
3424 self.for_each_line(
3425 Point::new(prev_non_blank_row.unwrap_or(row_range.start), 0)
3426 ..Point::new(row_range.end, 0),
3427 |row, line| {
3428 let indent_len = self.indent_size_for_line(row).len;
3429 let row_language = self.language_at(Point::new(row, indent_len)).cloned();
3430 let row_language_config = row_language
3431 .as_ref()
3432 .map(|lang| lang.config())
3433 .unwrap_or(config);
3434
3435 if row_language_config
3436 .decrease_indent_pattern
3437 .as_ref()
3438 .is_some_and(|regex| regex.is_match(line))
3439 {
3440 indent_change_rows.push((row, Ordering::Less));
3441 }
3442 if row_language_config
3443 .increase_indent_pattern
3444 .as_ref()
3445 .is_some_and(|regex| regex.is_match(line))
3446 {
3447 indent_change_rows.push((row + 1, Ordering::Greater));
3448 }
3449 while let Some(pos) = start_positions_iter.peek() {
3450 if pos.start.row < row {
3451 let pos = start_positions_iter.next().unwrap().clone();
3452 last_seen_suffix
3453 .entry(pos.suffix.to_string())
3454 .or_default()
3455 .push(pos);
3456 } else {
3457 break;
3458 }
3459 }
3460 for rule in &row_language_config.decrease_indent_patterns {
3461 if rule.pattern.as_ref().is_some_and(|r| r.is_match(line)) {
3462 let row_start_column = self.indent_size_for_line(row).len;
3463 let basis_row = rule
3464 .valid_after
3465 .iter()
3466 .filter_map(|valid_suffix| last_seen_suffix.get(valid_suffix))
3467 .flatten()
3468 .filter(|pos| {
3469 row_language
3470 .as_ref()
3471 .or(self.language.as_ref())
3472 .is_some_and(|lang| Arc::ptr_eq(lang, &pos.language))
3473 })
3474 .filter(|pos| pos.start.column <= row_start_column)
3475 .max_by_key(|pos| pos.start.row);
3476 if let Some(outdent_to) = basis_row {
3477 regex_outdent_map.insert(row, outdent_to.start.row);
3478 }
3479 break;
3480 }
3481 }
3482 },
3483 );
3484
3485 let mut indent_changes = indent_change_rows.into_iter().peekable();
3486 let mut prev_row = if config.auto_indent_using_last_non_empty_line {
3487 prev_non_blank_row.unwrap_or(0)
3488 } else {
3489 row_range.start.saturating_sub(1)
3490 };
3491
3492 let mut prev_row_start = Point::new(prev_row, self.indent_size_for_line(prev_row).len);
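        // For each requested row, combine the signals gathered above (regex-driven
        // indent changes, indent ranges from the indents query, regex outdent targets,
        // and error ranges) into a single `IndentSuggestion` relative to a basis row.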
3493 Some(row_range.map(move |row| {
3494 let row_start = Point::new(row, self.indent_size_for_line(row).len);
3495
3496 let mut indent_from_prev_row = false;
3497 let mut outdent_from_prev_row = false;
3498 let mut outdent_to_row = u32::MAX;
3499 let mut from_regex = false;
3500
3501 while let Some((indent_row, delta)) = indent_changes.peek() {
3502 match indent_row.cmp(&row) {
3503 Ordering::Equal => match delta {
3504 Ordering::Less => {
3505 from_regex = true;
3506 outdent_from_prev_row = true
3507 }
3508 Ordering::Greater => {
3509 indent_from_prev_row = true;
3510 from_regex = true
3511 }
3512 _ => {}
3513 },
3514
3515 Ordering::Greater => break,
3516 Ordering::Less => {}
3517 }
3518
3519 indent_changes.next();
3520 }
3521
3522 for range in &indent_ranges {
3523 if range.start.row >= row {
3524 break;
3525 }
3526 if range.start.row == prev_row && range.end > row_start {
3527 indent_from_prev_row = true;
3528 }
3529 if range.end > prev_row_start && range.end <= row_start {
3530 outdent_to_row = outdent_to_row.min(range.start.row);
3531 }
3532 }
3533
3534 if let Some(basis_row) = regex_outdent_map.get(&row) {
3535 indent_from_prev_row = false;
3536 outdent_to_row = *basis_row;
3537 from_regex = true;
3538 }
3539
3540 let within_error = error_ranges
3541 .iter()
3542 .any(|e| e.start.row < row && e.end > row_start);
3543
3544 let suggestion = if outdent_to_row == prev_row
3545 || (outdent_from_prev_row && indent_from_prev_row)
3546 {
3547 Some(IndentSuggestion {
3548 basis_row: prev_row,
3549 delta: Ordering::Equal,
3550 within_error: within_error && !from_regex,
3551 })
3552 } else if indent_from_prev_row {
3553 Some(IndentSuggestion {
3554 basis_row: prev_row,
3555 delta: Ordering::Greater,
3556 within_error: within_error && !from_regex,
3557 })
3558 } else if outdent_to_row < prev_row {
3559 Some(IndentSuggestion {
3560 basis_row: outdent_to_row,
3561 delta: Ordering::Equal,
3562 within_error: within_error && !from_regex,
3563 })
3564 } else if outdent_from_prev_row {
3565 Some(IndentSuggestion {
3566 basis_row: prev_row,
3567 delta: Ordering::Less,
3568 within_error: within_error && !from_regex,
3569 })
3570 } else if config.auto_indent_using_last_non_empty_line || !self.is_line_blank(prev_row)
3571 {
3572 Some(IndentSuggestion {
3573 basis_row: prev_row,
3574 delta: Ordering::Equal,
3575 within_error: within_error && !from_regex,
3576 })
3577 } else {
3578 None
3579 };
3580
3581 prev_row = row;
3582 prev_row_start = row_start;
3583 suggestion
3584 }))
3585 }
3586
3587 fn prev_non_blank_row(&self, mut row: u32) -> Option<u32> {
3588 while row > 0 {
3589 row -= 1;
3590 if !self.is_line_blank(row) {
3591 return Some(row);
3592 }
3593 }
3594 None
3595 }
3596
3597 #[ztracing::instrument(skip_all)]
3598 fn get_highlights(&self, range: Range<usize>) -> (SyntaxMapCaptures<'_>, Vec<HighlightMap>) {
3599 let captures = self.syntax.captures(range, &self.text, |grammar| {
3600 grammar
3601 .highlights_config
3602 .as_ref()
3603 .map(|config| &config.query)
3604 });
3605 let highlight_maps = captures
3606 .grammars()
3607 .iter()
3608 .map(|grammar| grammar.highlight_map())
3609 .collect();
3610 (captures, highlight_maps)
3611 }
3612
3613 /// Iterates over chunks of text in the given range of the buffer. Text is chunked
3614 /// in an arbitrary way due to being stored in a [`Rope`](text::Rope). The text is also
3615 /// returned in chunks where each chunk has a single syntax highlighting style and
3616 /// diagnostic status.
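    ///
    /// Illustrative usage sketch (assumes a `BufferSnapshot` named `snapshot` is in scope):
    ///
    /// ```ignore
    /// // Collect the first 100 bytes of the buffer as language-aware chunks.
    /// let text: String = snapshot
    ///     .chunks(0..snapshot.len().min(100), true)
    ///     .map(|chunk| chunk.text)
    ///     .collect();
    /// ```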
3617 #[ztracing::instrument(skip_all)]
3618 pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> BufferChunks<'_> {
3619 let range = range.start.to_offset(self)..range.end.to_offset(self);
3620
3621 let mut syntax = None;
3622 if language_aware {
3623 syntax = Some(self.get_highlights(range.clone()));
3624 }
3625 // We want to look at diagnostic spans only when iterating over language-annotated chunks.
3626 let diagnostics = language_aware;
3627 BufferChunks::new(self.text.as_rope(), range, syntax, diagnostics, Some(self))
3628 }
3629
3630 pub fn highlighted_text_for_range<T: ToOffset>(
3631 &self,
3632 range: Range<T>,
3633 override_style: Option<HighlightStyle>,
3634 syntax_theme: &SyntaxTheme,
3635 ) -> HighlightedText {
3636 HighlightedText::from_buffer_range(
3637 range,
3638 &self.text,
3639 &self.syntax,
3640 override_style,
3641 syntax_theme,
3642 )
3643 }
3644
    /// Invokes the given callback for each line of text in the given range of the buffer.
    /// The callback receives the row number and the line's text; a single reused
    /// string buffer avoids allocating a new string per line.
3647 fn for_each_line(&self, range: Range<Point>, mut callback: impl FnMut(u32, &str)) {
3648 let mut line = String::new();
3649 let mut row = range.start.row;
3650 for chunk in self
3651 .as_rope()
3652 .chunks_in_range(range.to_offset(self))
3653 .chain(["\n"])
3654 {
3655 for (newline_ix, text) in chunk.split('\n').enumerate() {
3656 if newline_ix > 0 {
3657 callback(row, &line);
3658 row += 1;
3659 line.clear();
3660 }
3661 line.push_str(text);
3662 }
3663 }
3664 }
3665
3666 /// Iterates over every [`SyntaxLayer`] in the buffer.
3667 pub fn syntax_layers(&self) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3668 self.syntax_layers_for_range(0..self.len(), true)
3669 }
3670
3671 pub fn syntax_layer_at<D: ToOffset>(&self, position: D) -> Option<SyntaxLayer<'_>> {
3672 let offset = position.to_offset(self);
3673 self.syntax_layers_for_range(offset..offset, false)
3674 .filter(|l| {
3675 if let Some(ranges) = l.included_sub_ranges {
3676 ranges.iter().any(|range| {
3677 let start = range.start.to_offset(self);
3678 start <= offset && {
3679 let end = range.end.to_offset(self);
3680 offset < end
3681 }
3682 })
3683 } else {
3684 l.node().start_byte() <= offset && l.node().end_byte() > offset
3685 }
3686 })
3687 .last()
3688 }
3689
3690 pub fn syntax_layers_for_range<D: ToOffset>(
3691 &self,
3692 range: Range<D>,
3693 include_hidden: bool,
3694 ) -> impl Iterator<Item = SyntaxLayer<'_>> + '_ {
3695 self.syntax
3696 .layers_for_range(range, &self.text, include_hidden)
3697 }
3698
3699 pub fn smallest_syntax_layer_containing<D: ToOffset>(
3700 &self,
3701 range: Range<D>,
3702 ) -> Option<SyntaxLayer<'_>> {
3703 let range = range.to_offset(self);
3704 self.syntax
3705 .layers_for_range(range, &self.text, false)
3706 .max_by(|a, b| {
3707 if a.depth != b.depth {
3708 a.depth.cmp(&b.depth)
3709 } else if a.offset.0 != b.offset.0 {
3710 a.offset.0.cmp(&b.offset.0)
3711 } else {
3712 a.node().end_byte().cmp(&b.node().end_byte()).reverse()
3713 }
3714 })
3715 }
3716
3717 /// Returns the main [`Language`].
3718 pub fn language(&self) -> Option<&Arc<Language>> {
3719 self.language.as_ref()
3720 }
3721
3722 /// Returns the [`Language`] at the given location.
3723 pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
3724 self.syntax_layer_at(position)
3725 .map(|info| info.language)
3726 .or(self.language.as_ref())
3727 }
3728
3729 /// Returns the settings for the language at the given location.
3730 pub fn settings_at<'a, D: ToOffset>(
3731 &'a self,
3732 position: D,
3733 cx: &'a App,
3734 ) -> Cow<'a, LanguageSettings> {
3735 language_settings(
3736 self.language_at(position).map(|l| l.name()),
3737 self.file.as_ref(),
3738 cx,
3739 )
3740 }
3741
3742 pub fn char_classifier_at<T: ToOffset>(&self, point: T) -> CharClassifier {
3743 CharClassifier::new(self.language_scope_at(point))
3744 }
3745
3746 /// Returns the [`LanguageScope`] at the given location.
3747 pub fn language_scope_at<D: ToOffset>(&self, position: D) -> Option<LanguageScope> {
3748 let offset = position.to_offset(self);
3749 let mut scope = None;
3750 let mut smallest_range_and_depth: Option<(Range<usize>, usize)> = None;
3751
3752 // Use the layer that has the smallest node intersecting the given point.
3753 for layer in self
3754 .syntax
3755 .layers_for_range(offset..offset, &self.text, false)
3756 {
3757 let mut cursor = layer.node().walk();
3758
3759 let mut range = None;
3760 loop {
3761 let child_range = cursor.node().byte_range();
3762 if !child_range.contains(&offset) {
3763 break;
3764 }
3765
3766 range = Some(child_range);
3767 if cursor.goto_first_child_for_byte(offset).is_none() {
3768 break;
3769 }
3770 }
3771
3772 if let Some(range) = range
3773 && smallest_range_and_depth.as_ref().is_none_or(
3774 |(smallest_range, smallest_range_depth)| {
3775 if layer.depth > *smallest_range_depth {
3776 true
3777 } else if layer.depth == *smallest_range_depth {
3778 range.len() < smallest_range.len()
3779 } else {
3780 false
3781 }
3782 },
3783 )
3784 {
3785 smallest_range_and_depth = Some((range, layer.depth));
3786 scope = Some(LanguageScope {
3787 language: layer.language.clone(),
3788 override_id: layer.override_id(offset, &self.text),
3789 });
3790 }
3791 }
3792
3793 scope.or_else(|| {
3794 self.language.clone().map(|language| LanguageScope {
3795 language,
3796 override_id: None,
3797 })
3798 })
3799 }
3800
3801 /// Returns a tuple of the range and character kind of the word
3802 /// surrounding the given position.
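    ///
    /// Illustrative sketch (assumes `snapshot` contains the text `"let foo_bar = 1;"`):
    ///
    /// ```ignore
    /// // With an offset inside `foo_bar`, the whole identifier is returned.
    /// let (range, _kind) = snapshot.surrounding_word(6, None);
    /// assert_eq!(&snapshot.text()[range], "foo_bar");
    /// ```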
3803 pub fn surrounding_word<T: ToOffset>(
3804 &self,
3805 start: T,
3806 scope_context: Option<CharScopeContext>,
3807 ) -> (Range<usize>, Option<CharKind>) {
3808 let mut start = start.to_offset(self);
3809 let mut end = start;
3810 let mut next_chars = self.chars_at(start).take(128).peekable();
3811 let mut prev_chars = self.reversed_chars_at(start).take(128).peekable();
3812
3813 let classifier = self.char_classifier_at(start).scope_context(scope_context);
3814 let word_kind = cmp::max(
3815 prev_chars.peek().copied().map(|c| classifier.kind(c)),
3816 next_chars.peek().copied().map(|c| classifier.kind(c)),
3817 );
3818
3819 for ch in prev_chars {
3820 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3821 start -= ch.len_utf8();
3822 } else {
3823 break;
3824 }
3825 }
3826
3827 for ch in next_chars {
3828 if Some(classifier.kind(ch)) == word_kind && ch != '\n' {
3829 end += ch.len_utf8();
3830 } else {
3831 break;
3832 }
3833 }
3834
3835 (start..end, word_kind)
3836 }
3837
3838 /// Moves the TreeCursor to the smallest descendant or ancestor syntax node enclosing the given
3839 /// range. When `require_larger` is true, the node found must be larger than the query range.
3840 ///
3841 /// Returns true if a node was found, and false otherwise. In the `false` case the cursor will
3842 /// be moved to the root of the tree.
3843 fn goto_node_enclosing_range(
3844 cursor: &mut tree_sitter::TreeCursor,
3845 query_range: &Range<usize>,
3846 require_larger: bool,
3847 ) -> bool {
3848 let mut ascending = false;
3849 loop {
3850 let mut range = cursor.node().byte_range();
3851 if query_range.is_empty() {
                // When the query range is empty and the current node starts after it, move to
                // the previous sibling to find the containing node.
3854 if range.start > query_range.start {
3855 cursor.goto_previous_sibling();
3856 range = cursor.node().byte_range();
3857 }
3858 } else {
3859 // When the query range is non-empty and the current node ends exactly at the start,
3860 // move to the next sibling to find a node that extends beyond the start.
3861 if range.end == query_range.start {
3862 cursor.goto_next_sibling();
3863 range = cursor.node().byte_range();
3864 }
3865 }
3866
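            // The node qualifies when it inclusively contains the query range and,
            // if `require_larger` is set, is strictly larger than that range.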
3867 let encloses = range.contains_inclusive(query_range)
3868 && (!require_larger || range.len() > query_range.len());
3869 if !encloses {
3870 ascending = true;
3871 if !cursor.goto_parent() {
3872 return false;
3873 }
3874 continue;
3875 } else if ascending {
3876 return true;
3877 }
3878
3879 // Descend into the current node.
3880 if cursor
3881 .goto_first_child_for_byte(query_range.start)
3882 .is_none()
3883 {
3884 return true;
3885 }
3886 }
3887 }
3888
3889 pub fn syntax_ancestor<'a, T: ToOffset>(
3890 &'a self,
3891 range: Range<T>,
3892 ) -> Option<tree_sitter::Node<'a>> {
3893 let range = range.start.to_offset(self)..range.end.to_offset(self);
3894 let mut result: Option<tree_sitter::Node<'a>> = None;
3895 for layer in self
3896 .syntax
3897 .layers_for_range(range.clone(), &self.text, true)
3898 {
3899 let mut cursor = layer.node().walk();
3900
3901 // Find the node that both contains the range and is larger than it.
3902 if !Self::goto_node_enclosing_range(&mut cursor, &range, true) {
3903 continue;
3904 }
3905
3906 let left_node = cursor.node();
3907 let mut layer_result = left_node;
3908
3909 // For an empty range, try to find another node immediately to the right of the range.
3910 if left_node.end_byte() == range.start {
3911 let mut right_node = None;
3912 while !cursor.goto_next_sibling() {
3913 if !cursor.goto_parent() {
3914 break;
3915 }
3916 }
3917
3918 while cursor.node().start_byte() == range.start {
3919 right_node = Some(cursor.node());
3920 if !cursor.goto_first_child() {
3921 break;
3922 }
3923 }
3924
3925 // If there is a candidate node on both sides of the (empty) range, then
3926 // decide between the two by favoring a named node over an anonymous token.
3927 // If both nodes are the same in that regard, favor the right one.
3928 if let Some(right_node) = right_node
3929 && (right_node.is_named() || !left_node.is_named())
3930 {
3931 layer_result = right_node;
3932 }
3933 }
3934
3935 if let Some(previous_result) = &result
3936 && previous_result.byte_range().len() < layer_result.byte_range().len()
3937 {
3938 continue;
3939 }
3940 result = Some(layer_result);
3941 }
3942
3943 result
3944 }
3945
3946 /// Find the previous sibling syntax node at the given range.
3947 ///
3948 /// This function locates the syntax node that precedes the node containing
3949 /// the given range. It searches hierarchically by:
3950 /// 1. Finding the node that contains the given range
3951 /// 2. Looking for the previous sibling at the same tree level
3952 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
3953 ///
3954 /// Returns `None` if there is no previous sibling at any ancestor level.
3955 pub fn syntax_prev_sibling<'a, T: ToOffset>(
3956 &'a self,
3957 range: Range<T>,
3958 ) -> Option<tree_sitter::Node<'a>> {
3959 let range = range.start.to_offset(self)..range.end.to_offset(self);
3960 let mut result: Option<tree_sitter::Node<'a>> = None;
3961
3962 for layer in self
3963 .syntax
3964 .layers_for_range(range.clone(), &self.text, true)
3965 {
3966 let mut cursor = layer.node().walk();
3967
3968 // Find the node that contains the range
3969 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
3970 continue;
3971 }
3972
3973 // Look for the previous sibling, moving up ancestor levels if needed
3974 loop {
3975 if cursor.goto_previous_sibling() {
3976 let layer_result = cursor.node();
3977
3978 if let Some(previous_result) = &result {
3979 if previous_result.byte_range().end < layer_result.byte_range().end {
3980 continue;
3981 }
3982 }
3983 result = Some(layer_result);
3984 break;
3985 }
3986
3987 // No sibling found at this level, try moving up to parent
3988 if !cursor.goto_parent() {
3989 break;
3990 }
3991 }
3992 }
3993
3994 result
3995 }
3996
3997 /// Find the next sibling syntax node at the given range.
3998 ///
3999 /// This function locates the syntax node that follows the node containing
4000 /// the given range. It searches hierarchically by:
4001 /// 1. Finding the node that contains the given range
4002 /// 2. Looking for the next sibling at the same tree level
4003 /// 3. If no sibling is found, moving up to parent levels and searching for siblings
4004 ///
4005 /// Returns `None` if there is no next sibling at any ancestor level.
4006 pub fn syntax_next_sibling<'a, T: ToOffset>(
4007 &'a self,
4008 range: Range<T>,
4009 ) -> Option<tree_sitter::Node<'a>> {
4010 let range = range.start.to_offset(self)..range.end.to_offset(self);
4011 let mut result: Option<tree_sitter::Node<'a>> = None;
4012
4013 for layer in self
4014 .syntax
4015 .layers_for_range(range.clone(), &self.text, true)
4016 {
4017 let mut cursor = layer.node().walk();
4018
4019 // Find the node that contains the range
4020 if !Self::goto_node_enclosing_range(&mut cursor, &range, false) {
4021 continue;
4022 }
4023
4024 // Look for the next sibling, moving up ancestor levels if needed
4025 loop {
4026 if cursor.goto_next_sibling() {
4027 let layer_result = cursor.node();
4028
4029 if let Some(previous_result) = &result {
4030 if previous_result.byte_range().start > layer_result.byte_range().start {
4031 continue;
4032 }
4033 }
4034 result = Some(layer_result);
4035 break;
4036 }
4037
4038 // No sibling found at this level, try moving up to parent
4039 if !cursor.goto_parent() {
4040 break;
4041 }
4042 }
4043 }
4044
4045 result
4046 }
4047
    /// Returns the root syntax node within the row of the given position.
4049 pub fn syntax_root_ancestor(&self, position: Anchor) -> Option<tree_sitter::Node<'_>> {
4050 let start_offset = position.to_offset(self);
4051
4052 let row = self.summary_for_anchor::<text::PointUtf16>(&position).row as usize;
4053
4054 let layer = self
4055 .syntax
4056 .layers_for_range(start_offset..start_offset, &self.text, true)
4057 .next()?;
4058
4059 let mut cursor = layer.node().walk();
4060
4061 // Descend to the first leaf that touches the start of the range.
4062 while cursor.goto_first_child_for_byte(start_offset).is_some() {
4063 if cursor.node().end_byte() == start_offset {
4064 cursor.goto_next_sibling();
4065 }
4066 }
4067
4068 // Ascend to the root node within the same row.
4069 while cursor.goto_parent() {
4070 if cursor.node().start_position().row != row {
4071 break;
4072 }
4073 }
4074
4075 Some(cursor.node())
4076 }
4077
4078 /// Returns the outline for the buffer.
4079 ///
4080 /// This method allows passing an optional [`SyntaxTheme`] to
4081 /// syntax-highlight the returned symbols.
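    ///
    /// Illustrative sketch (assumes `snapshot` has a language with an outline query configured):
    ///
    /// ```ignore
    /// for item in snapshot.outline(None).items {
    ///     println!("{}{}", "  ".repeat(item.depth), item.text);
    /// }
    /// ```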
4082 pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Outline<Anchor> {
4083 Outline::new(self.outline_items_containing(0..self.len(), true, theme))
4084 }
4085
4086 /// Returns all the symbols that contain the given position.
4087 ///
4088 /// This method allows passing an optional [`SyntaxTheme`] to
4089 /// syntax-highlight the returned symbols.
4090 pub fn symbols_containing<T: ToOffset>(
4091 &self,
4092 position: T,
4093 theme: Option<&SyntaxTheme>,
4094 ) -> Vec<OutlineItem<Anchor>> {
4095 let position = position.to_offset(self);
4096 let start = self.clip_offset(position.saturating_sub(1), Bias::Left);
4097 let end = self.clip_offset(position + 1, Bias::Right);
4098 let mut items = self.outline_items_containing(start..end, false, theme);
4099 let mut prev_depth = None;
4100 items.retain(|item| {
4101 let result = prev_depth.is_none_or(|prev_depth| item.depth > prev_depth);
4102 prev_depth = Some(item.depth);
4103 result
4104 });
4105 items
4106 }
4107
4108 pub fn outline_range_containing<T: ToOffset>(&self, range: Range<T>) -> Option<Range<Point>> {
4109 let range = range.to_offset(self);
4110 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4111 grammar.outline_config.as_ref().map(|c| &c.query)
4112 });
4113 let configs = matches
4114 .grammars()
4115 .iter()
4116 .map(|g| g.outline_config.as_ref().unwrap())
4117 .collect::<Vec<_>>();
4118
4119 while let Some(mat) = matches.peek() {
4120 let config = &configs[mat.grammar_index];
4121 let containing_item_node = maybe!({
4122 let item_node = mat.captures.iter().find_map(|cap| {
4123 if cap.index == config.item_capture_ix {
4124 Some(cap.node)
4125 } else {
4126 None
4127 }
4128 })?;
4129
4130 let item_byte_range = item_node.byte_range();
4131 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4132 None
4133 } else {
4134 Some(item_node)
4135 }
4136 });
4137
4138 if let Some(item_node) = containing_item_node {
4139 return Some(
4140 Point::from_ts_point(item_node.start_position())
4141 ..Point::from_ts_point(item_node.end_position()),
4142 );
4143 }
4144
4145 matches.advance();
4146 }
4147 None
4148 }
4149
4150 pub fn outline_items_containing<T: ToOffset>(
4151 &self,
4152 range: Range<T>,
4153 include_extra_context: bool,
4154 theme: Option<&SyntaxTheme>,
4155 ) -> Vec<OutlineItem<Anchor>> {
4156 self.outline_items_containing_internal(
4157 range,
4158 include_extra_context,
4159 theme,
4160 |this, range| this.anchor_after(range.start)..this.anchor_before(range.end),
4161 )
4162 }
4163
4164 pub fn outline_items_as_points_containing<T: ToOffset>(
4165 &self,
4166 range: Range<T>,
4167 include_extra_context: bool,
4168 theme: Option<&SyntaxTheme>,
4169 ) -> Vec<OutlineItem<Point>> {
4170 self.outline_items_containing_internal(range, include_extra_context, theme, |_, range| {
4171 range
4172 })
4173 }
4174
4175 pub fn outline_items_as_offsets_containing<T: ToOffset>(
4176 &self,
4177 range: Range<T>,
4178 include_extra_context: bool,
4179 theme: Option<&SyntaxTheme>,
4180 ) -> Vec<OutlineItem<usize>> {
4181 self.outline_items_containing_internal(
4182 range,
4183 include_extra_context,
4184 theme,
4185 |buffer, range| range.to_offset(buffer),
4186 )
4187 }
4188
4189 fn outline_items_containing_internal<T: ToOffset, U>(
4190 &self,
4191 range: Range<T>,
4192 include_extra_context: bool,
4193 theme: Option<&SyntaxTheme>,
4194 range_callback: fn(&Self, Range<Point>) -> Range<U>,
4195 ) -> Vec<OutlineItem<U>> {
4196 let range = range.to_offset(self);
4197 let mut matches = self.syntax.matches(range.clone(), &self.text, |grammar| {
4198 grammar.outline_config.as_ref().map(|c| &c.query)
4199 });
4200
4201 let mut items = Vec::new();
4202 let mut annotation_row_ranges: Vec<Range<u32>> = Vec::new();
4203 while let Some(mat) = matches.peek() {
4204 let config = matches.grammars()[mat.grammar_index]
4205 .outline_config
4206 .as_ref()
4207 .unwrap();
4208 if let Some(item) =
4209 self.next_outline_item(config, &mat, &range, include_extra_context, theme)
4210 {
4211 items.push(item);
4212 } else if let Some(capture) = mat
4213 .captures
4214 .iter()
4215 .find(|capture| Some(capture.index) == config.annotation_capture_ix)
4216 {
4217 let capture_range = capture.node.start_position()..capture.node.end_position();
4218 let mut capture_row_range =
4219 capture_range.start.row as u32..capture_range.end.row as u32;
4220 if capture_range.end.row > capture_range.start.row && capture_range.end.column == 0
4221 {
4222 capture_row_range.end -= 1;
4223 }
4224 if let Some(last_row_range) = annotation_row_ranges.last_mut() {
4225 if last_row_range.end >= capture_row_range.start.saturating_sub(1) {
4226 last_row_range.end = capture_row_range.end;
4227 } else {
4228 annotation_row_ranges.push(capture_row_range);
4229 }
4230 } else {
4231 annotation_row_ranges.push(capture_row_range);
4232 }
4233 }
4234 matches.advance();
4235 }
4236
4237 items.sort_by_key(|item| (item.range.start, Reverse(item.range.end)));
4238
        // Assign depths based on containment relationships and convert the item
        // ranges via the provided range callback.
4240 let mut item_ends_stack = Vec::<Point>::new();
4241 let mut anchor_items = Vec::new();
4242 let mut annotation_row_ranges = annotation_row_ranges.into_iter().peekable();
4243 for item in items {
4244 while let Some(last_end) = item_ends_stack.last().copied() {
4245 if last_end < item.range.end {
4246 item_ends_stack.pop();
4247 } else {
4248 break;
4249 }
4250 }
4251
4252 let mut annotation_row_range = None;
4253 while let Some(next_annotation_row_range) = annotation_row_ranges.peek() {
4254 let row_preceding_item = item.range.start.row.saturating_sub(1);
4255 if next_annotation_row_range.end < row_preceding_item {
4256 annotation_row_ranges.next();
4257 } else {
4258 if next_annotation_row_range.end == row_preceding_item {
4259 annotation_row_range = Some(next_annotation_row_range.clone());
4260 annotation_row_ranges.next();
4261 }
4262 break;
4263 }
4264 }
4265
4266 anchor_items.push(OutlineItem {
4267 depth: item_ends_stack.len(),
4268 range: range_callback(self, item.range.clone()),
4269 source_range_for_text: range_callback(self, item.source_range_for_text.clone()),
4270 text: item.text,
4271 highlight_ranges: item.highlight_ranges,
4272 name_ranges: item.name_ranges,
4273 body_range: item.body_range.map(|r| range_callback(self, r)),
4274 annotation_range: annotation_row_range.map(|annotation_range| {
4275 let point_range = Point::new(annotation_range.start, 0)
4276 ..Point::new(annotation_range.end, self.line_len(annotation_range.end));
4277 range_callback(self, point_range)
4278 }),
4279 });
4280 item_ends_stack.push(item.range.end);
4281 }
4282
4283 anchor_items
4284 }
4285
4286 fn next_outline_item(
4287 &self,
4288 config: &OutlineConfig,
4289 mat: &SyntaxMapMatch,
4290 range: &Range<usize>,
4291 include_extra_context: bool,
4292 theme: Option<&SyntaxTheme>,
4293 ) -> Option<OutlineItem<Point>> {
4294 let item_node = mat.captures.iter().find_map(|cap| {
4295 if cap.index == config.item_capture_ix {
4296 Some(cap.node)
4297 } else {
4298 None
4299 }
4300 })?;
4301
4302 let item_byte_range = item_node.byte_range();
4303 if item_byte_range.end < range.start || item_byte_range.start > range.end {
4304 return None;
4305 }
4306 let item_point_range = Point::from_ts_point(item_node.start_position())
4307 ..Point::from_ts_point(item_node.end_position());
4308
4309 let mut open_point = None;
4310 let mut close_point = None;
4311
4312 let mut buffer_ranges = Vec::new();
4313 let mut add_to_buffer_ranges = |node: tree_sitter::Node, node_is_name| {
4314 let mut range = node.start_byte()..node.end_byte();
4315 let start = node.start_position();
4316 if node.end_position().row > start.row {
4317 range.end = range.start + self.line_len(start.row as u32) as usize - start.column;
4318 }
4319
4320 if !range.is_empty() {
4321 buffer_ranges.push((range, node_is_name));
4322 }
4323 };
4324
4325 for capture in mat.captures {
4326 if capture.index == config.name_capture_ix {
4327 add_to_buffer_ranges(capture.node, true);
4328 } else if Some(capture.index) == config.context_capture_ix
4329 || (Some(capture.index) == config.extra_context_capture_ix && include_extra_context)
4330 {
4331 add_to_buffer_ranges(capture.node, false);
4332 } else {
4333 if Some(capture.index) == config.open_capture_ix {
4334 open_point = Some(Point::from_ts_point(capture.node.end_position()));
4335 } else if Some(capture.index) == config.close_capture_ix {
4336 close_point = Some(Point::from_ts_point(capture.node.start_position()));
4337 }
4338 }
4339 }
4340
4341 if buffer_ranges.is_empty() {
4342 return None;
4343 }
4344 let source_range_for_text =
4345 buffer_ranges.first().unwrap().0.start..buffer_ranges.last().unwrap().0.end;
4346
4347 let mut text = String::new();
4348 let mut highlight_ranges = Vec::new();
4349 let mut name_ranges = Vec::new();
4350 let mut chunks = self.chunks(source_range_for_text.clone(), true);
4351 let mut last_buffer_range_end = 0;
4352 for (buffer_range, is_name) in buffer_ranges {
4353 let space_added = !text.is_empty() && buffer_range.start > last_buffer_range_end;
4354 if space_added {
4355 text.push(' ');
4356 }
4357 let before_append_len = text.len();
4358 let mut offset = buffer_range.start;
4359 chunks.seek(buffer_range.clone());
4360 for mut chunk in chunks.by_ref() {
4361 if chunk.text.len() > buffer_range.end - offset {
4362 chunk.text = &chunk.text[0..(buffer_range.end - offset)];
4363 offset = buffer_range.end;
4364 } else {
4365 offset += chunk.text.len();
4366 }
4367 let style = chunk
4368 .syntax_highlight_id
4369 .zip(theme)
4370 .and_then(|(highlight, theme)| highlight.style(theme));
4371 if let Some(style) = style {
4372 let start = text.len();
4373 let end = start + chunk.text.len();
4374 highlight_ranges.push((start..end, style));
4375 }
4376 text.push_str(chunk.text);
4377 if offset >= buffer_range.end {
4378 break;
4379 }
4380 }
4381 if is_name {
4382 let after_append_len = text.len();
4383 let start = if space_added && !name_ranges.is_empty() {
4384 before_append_len - 1
4385 } else {
4386 before_append_len
4387 };
4388 name_ranges.push(start..after_append_len);
4389 }
4390 last_buffer_range_end = buffer_range.end;
4391 }
4392
4393 Some(OutlineItem {
4394 depth: 0, // We'll calculate the depth later
4395 range: item_point_range,
4396 source_range_for_text: source_range_for_text.to_point(self),
4397 text,
4398 highlight_ranges,
4399 name_ranges,
4400 body_range: open_point.zip(close_point).map(|(start, end)| start..end),
4401 annotation_range: None,
4402 })
4403 }
4404
4405 pub fn function_body_fold_ranges<T: ToOffset>(
4406 &self,
4407 within: Range<T>,
4408 ) -> impl Iterator<Item = Range<usize>> + '_ {
4409 self.text_object_ranges(within, TreeSitterOptions::default())
4410 .filter_map(|(range, obj)| (obj == TextObject::InsideFunction).then_some(range))
4411 }
4412
4413 /// For each grammar in the language, runs the provided
4414 /// [`tree_sitter::Query`] against the given range.
4415 pub fn matches(
4416 &self,
4417 range: Range<usize>,
4418 query: fn(&Grammar) -> Option<&tree_sitter::Query>,
4419 ) -> SyntaxMapMatches<'_> {
4420 self.syntax.matches(range, self, query)
4421 }
4422
    /// Finds all [`RowChunks`] applicable to the given range, then returns all bracket pairs
    /// that intersect with those chunks. Hence, it may return more bracket pairs than the
    /// range itself contains.
    ///
    /// Chunks listed in `known_chunks` are skipped.
    /// The resulting bracket match collections are not ordered.
4428 pub fn fetch_bracket_ranges(
4429 &self,
4430 range: Range<usize>,
4431 known_chunks: Option<&HashSet<Range<BufferRow>>>,
4432 ) -> HashMap<Range<BufferRow>, Vec<BracketMatch<usize>>> {
4433 let mut all_bracket_matches = HashMap::default();
4434
4435 for chunk in self
4436 .tree_sitter_data
4437 .chunks
4438 .applicable_chunks(&[range.to_point(self)])
4439 {
4440 if known_chunks.is_some_and(|chunks| chunks.contains(&chunk.row_range())) {
4441 continue;
4442 }
4443 let chunk_range = chunk.anchor_range();
4444 let chunk_range = chunk_range.to_offset(&self);
4445
4446 if let Some(cached_brackets) =
4447 &self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4448 {
4449 all_bracket_matches.insert(chunk.row_range(), cached_brackets.clone());
4450 continue;
4451 }
4452
4453 let mut all_brackets: Vec<(BracketMatch<usize>, bool)> = Vec::new();
4454 let mut opens = Vec::new();
4455 let mut color_pairs = Vec::new();
4456
4457 let mut matches = self.syntax.matches_with_options(
4458 chunk_range.clone(),
4459 &self.text,
4460 TreeSitterOptions {
4461 max_bytes_to_query: Some(MAX_BYTES_TO_QUERY),
4462 max_start_depth: None,
4463 },
4464 |grammar| grammar.brackets_config.as_ref().map(|c| &c.query),
4465 );
4466 let configs = matches
4467 .grammars()
4468 .iter()
4469 .map(|grammar| grammar.brackets_config.as_ref().unwrap())
4470 .collect::<Vec<_>>();
4471
4472 // Group matches by open range so we can either trust grammar output
4473 // or repair it by picking a single closest close per open.
4474 let mut open_to_close_ranges = BTreeMap::new();
4475 while let Some(mat) = matches.peek() {
4476 let mut open = None;
4477 let mut close = None;
4478 let syntax_layer_depth = mat.depth;
4479 let config = configs[mat.grammar_index];
4480 let pattern = &config.patterns[mat.pattern_index];
4481 for capture in mat.captures {
4482 if capture.index == config.open_capture_ix {
4483 open = Some(capture.node.byte_range());
4484 } else if capture.index == config.close_capture_ix {
4485 close = Some(capture.node.byte_range());
4486 }
4487 }
4488
4489 matches.advance();
4490
4491 let Some((open_range, close_range)) = open.zip(close) else {
4492 continue;
4493 };
4494
4495 let bracket_range = open_range.start..=close_range.end;
4496 if !bracket_range.overlaps(&chunk_range) {
4497 continue;
4498 }
4499
4500 open_to_close_ranges
4501 .entry((open_range.start, open_range.end))
4502 .or_insert_with(BTreeMap::new)
4503 .insert(
4504 (close_range.start, close_range.end),
4505 BracketMatch {
4506 open_range: open_range.clone(),
4507 close_range: close_range.clone(),
4508 syntax_layer_depth,
4509 newline_only: pattern.newline_only,
4510 color_index: None,
4511 },
4512 );
4513
4514 all_brackets.push((
4515 BracketMatch {
4516 open_range,
4517 close_range,
4518 syntax_layer_depth,
4519 newline_only: pattern.newline_only,
4520 color_index: None,
4521 },
4522 pattern.rainbow_exclude,
4523 ));
4524 }
4525
4526 let has_bogus_matches = open_to_close_ranges
4527 .iter()
4528 .any(|(_, end_ranges)| end_ranges.len() > 1);
4529 if has_bogus_matches {
4530 // Grammar is producing bogus matches where one open is paired with multiple
4531 // closes. Build a valid stack by walking through positions in order.
4532 // For each close, we know the expected open_len from tree-sitter matches.
4533
4534 // Map each close to its expected open length (for inferring opens)
4535 let close_to_open_len: HashMap<(usize, usize), usize> = all_brackets
4536 .iter()
4537 .map(|(m, _)| ((m.close_range.start, m.close_range.end), m.open_range.len()))
4538 .collect();
4539
4540 // Collect unique opens and closes within this chunk
4541 let mut unique_opens: HashSet<(usize, usize)> = all_brackets
4542 .iter()
4543 .map(|(m, _)| (m.open_range.start, m.open_range.end))
4544 .filter(|(start, _)| chunk_range.contains(start))
4545 .collect();
4546
4547 let mut unique_closes: Vec<(usize, usize)> = all_brackets
4548 .iter()
4549 .map(|(m, _)| (m.close_range.start, m.close_range.end))
4550 .filter(|(start, _)| chunk_range.contains(start))
4551 .collect();
4552 unique_closes.sort();
4553 unique_closes.dedup();
4554
4555 // Build valid pairs by walking through closes in order
4556 let mut unique_opens_vec: Vec<_> = unique_opens.iter().copied().collect();
4557 unique_opens_vec.sort();
4558
4559 let mut valid_pairs: HashSet<((usize, usize), (usize, usize))> = HashSet::default();
4560 let mut open_stack: Vec<(usize, usize)> = Vec::new();
4561 let mut open_idx = 0;
4562
4563 for close in &unique_closes {
4564 // Push all opens before this close onto stack
4565 while open_idx < unique_opens_vec.len()
4566 && unique_opens_vec[open_idx].0 < close.0
4567 {
4568 open_stack.push(unique_opens_vec[open_idx]);
4569 open_idx += 1;
4570 }
4571
4572 // Try to match with most recent open
4573 if let Some(open) = open_stack.pop() {
4574 valid_pairs.insert((open, *close));
4575 } else if let Some(&open_len) = close_to_open_len.get(close) {
4576 // No open on stack - infer one based on expected open_len
4577 if close.0 >= open_len {
4578 let inferred = (close.0 - open_len, close.0);
4579 unique_opens.insert(inferred);
4580 valid_pairs.insert((inferred, *close));
4581 all_brackets.push((
4582 BracketMatch {
4583 open_range: inferred.0..inferred.1,
4584 close_range: close.0..close.1,
4585 newline_only: false,
4586 syntax_layer_depth: 0,
4587 color_index: None,
4588 },
4589 false,
4590 ));
4591 }
4592 }
4593 }
4594
4595 all_brackets.retain(|(m, _)| {
4596 let open = (m.open_range.start, m.open_range.end);
4597 let close = (m.close_range.start, m.close_range.end);
4598 valid_pairs.contains(&(open, close))
4599 });
4600 }
4601
4602 let mut all_brackets = all_brackets
4603 .into_iter()
4604 .enumerate()
4605 .map(|(index, (bracket_match, rainbow_exclude))| {
                    // Certain languages have "brackets" that are not really brackets, e.g. tags,
                    // and such a bracket will match the entire tag with all of the text inside.
                    // For now, avoid highlighting any pair that has more than a single character
                    // in each bracket. We still need to colorize `<Element/>` bracket pairs, so
                    // we cannot make this check stricter.
4610 let should_color = !rainbow_exclude
4611 && (bracket_match.open_range.len() == 1
4612 || bracket_match.close_range.len() == 1);
4613 if should_color {
4614 opens.push(bracket_match.open_range.clone());
4615 color_pairs.push((
4616 bracket_match.open_range.clone(),
4617 bracket_match.close_range.clone(),
4618 index,
4619 ));
4620 }
4621 bracket_match
4622 })
4623 .collect::<Vec<_>>();
4624
4625 opens.sort_by_key(|r| (r.start, r.end));
4626 opens.dedup_by(|a, b| a.start == b.start && a.end == b.end);
4627 color_pairs.sort_by_key(|(_, close, _)| close.end);
4628
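            // Assign rainbow color indices by simulating a bracket stack: pairs are
            // visited in order of their closing position, and a pair's color index is
            // its nesting depth when its own open bracket is on top of the stack.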
4629 let mut open_stack = Vec::new();
4630 let mut open_index = 0;
4631 for (open, close, index) in color_pairs {
4632 while open_index < opens.len() && opens[open_index].start < close.start {
4633 open_stack.push(opens[open_index].clone());
4634 open_index += 1;
4635 }
4636
4637 if open_stack.last() == Some(&open) {
4638 let depth_index = open_stack.len() - 1;
4639 all_brackets[index].color_index = Some(depth_index);
4640 open_stack.pop();
4641 }
4642 }
4643
4644 all_brackets.sort_by_key(|bracket_match| {
4645 (bracket_match.open_range.start, bracket_match.open_range.end)
4646 });
4647
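            // Cache the brackets computed for this chunk so that later calls can
            // reuse them instead of re-running the bracket queries.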
4648 if let empty_slot @ None =
4649 &mut self.tree_sitter_data.brackets_by_chunks.lock()[chunk.id]
4650 {
4651 *empty_slot = Some(all_brackets.clone());
4652 }
4653 all_bracket_matches.insert(chunk.row_range(), all_brackets);
4654 }
4655
4656 all_bracket_matches
4657 }
4658
4659 pub fn all_bracket_ranges(
4660 &self,
4661 range: Range<usize>,
4662 ) -> impl Iterator<Item = BracketMatch<usize>> {
4663 self.fetch_bracket_ranges(range.clone(), None)
4664 .into_values()
4665 .flatten()
4666 .filter(move |bracket_match| {
4667 let bracket_range = bracket_match.open_range.start..bracket_match.close_range.end;
4668 bracket_range.overlaps(&range)
4669 })
4670 }
4671
    /// Returns bracket range pairs overlapping or adjacent to `range`.
4673 pub fn bracket_ranges<T: ToOffset>(
4674 &self,
4675 range: Range<T>,
4676 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4677 // Find bracket pairs that *inclusively* contain the given range.
4678 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4679 self.all_bracket_ranges(range)
4680 .filter(|pair| !pair.newline_only)
4681 }
4682
4683 pub fn debug_variables_query<T: ToOffset>(
4684 &self,
4685 range: Range<T>,
4686 ) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
4687 let range = range.start.to_previous_offset(self)..range.end.to_next_offset(self);
4688
4689 let mut matches = self.syntax.matches_with_options(
4690 range.clone(),
4691 &self.text,
4692 TreeSitterOptions::default(),
4693 |grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
4694 );
4695
4696 let configs = matches
4697 .grammars()
4698 .iter()
4699 .map(|grammar| grammar.debug_variables_config.as_ref())
4700 .collect::<Vec<_>>();
4701
4702 let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
4703
4704 iter::from_fn(move || {
4705 loop {
4706 while let Some(capture) = captures.pop() {
4707 if capture.0.overlaps(&range) {
4708 return Some(capture);
4709 }
4710 }
4711
4712 let mat = matches.peek()?;
4713
4714 let Some(config) = configs[mat.grammar_index].as_ref() else {
4715 matches.advance();
4716 continue;
4717 };
4718
4719 for capture in mat.captures {
4720 let Some(ix) = config
4721 .objects_by_capture_ix
4722 .binary_search_by_key(&capture.index, |e| e.0)
4723 .ok()
4724 else {
4725 continue;
4726 };
4727 let text_object = config.objects_by_capture_ix[ix].1;
4728 let byte_range = capture.node.byte_range();
4729
4730 let mut found = false;
4731 for (range, existing) in captures.iter_mut() {
4732 if existing == &text_object {
4733 range.start = range.start.min(byte_range.start);
4734 range.end = range.end.max(byte_range.end);
4735 found = true;
4736 break;
4737 }
4738 }
4739
4740 if !found {
4741 captures.push((byte_range, text_object));
4742 }
4743 }
4744
4745 matches.advance();
4746 }
4747 })
4748 }
4749
4750 pub fn text_object_ranges<T: ToOffset>(
4751 &self,
4752 range: Range<T>,
4753 options: TreeSitterOptions,
4754 ) -> impl Iterator<Item = (Range<usize>, TextObject)> + '_ {
4755 let range =
4756 range.start.to_previous_offset(self)..self.len().min(range.end.to_next_offset(self));
4757
4758 let mut matches =
4759 self.syntax
4760 .matches_with_options(range.clone(), &self.text, options, |grammar| {
4761 grammar.text_object_config.as_ref().map(|c| &c.query)
4762 });
4763
4764 let configs = matches
4765 .grammars()
4766 .iter()
4767 .map(|grammar| grammar.text_object_config.as_ref())
4768 .collect::<Vec<_>>();
4769
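        // Captures within a single match that map to the same text object are merged
        // into one contiguous range before being yielded.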
4770 let mut captures = Vec::<(Range<usize>, TextObject)>::new();
4771
4772 iter::from_fn(move || {
4773 loop {
4774 while let Some(capture) = captures.pop() {
4775 if capture.0.overlaps(&range) {
4776 return Some(capture);
4777 }
4778 }
4779
4780 let mat = matches.peek()?;
4781
4782 let Some(config) = configs[mat.grammar_index].as_ref() else {
4783 matches.advance();
4784 continue;
4785 };
4786
4787 for capture in mat.captures {
4788 let Some(ix) = config
4789 .text_objects_by_capture_ix
4790 .binary_search_by_key(&capture.index, |e| e.0)
4791 .ok()
4792 else {
4793 continue;
4794 };
4795 let text_object = config.text_objects_by_capture_ix[ix].1;
4796 let byte_range = capture.node.byte_range();
4797
4798 let mut found = false;
4799 for (range, existing) in captures.iter_mut() {
4800 if existing == &text_object {
4801 range.start = range.start.min(byte_range.start);
4802 range.end = range.end.max(byte_range.end);
4803 found = true;
4804 break;
4805 }
4806 }
4807
4808 if !found {
4809 captures.push((byte_range, text_object));
4810 }
4811 }
4812
4813 matches.advance();
4814 }
4815 })
4816 }
4817
    /// Returns enclosing bracket ranges containing the given range.
4819 pub fn enclosing_bracket_ranges<T: ToOffset>(
4820 &self,
4821 range: Range<T>,
4822 ) -> impl Iterator<Item = BracketMatch<usize>> + '_ {
4823 let range = range.start.to_offset(self)..range.end.to_offset(self);
4824
4825 let result: Vec<_> = self.bracket_ranges(range.clone()).collect();
4826 let max_depth = result
4827 .iter()
4828 .map(|mat| mat.syntax_layer_depth)
4829 .max()
4830 .unwrap_or(0);
4831 result.into_iter().filter(move |pair| {
4832 pair.open_range.start <= range.start
4833 && pair.close_range.end >= range.end
4834 && pair.syntax_layer_depth == max_depth
4835 })
4836 }
4837
    /// Returns the smallest enclosing bracket ranges containing the given range, or `None`
    /// if no brackets contain the range.
    ///
    /// An optional `range_filter` can be passed to restrict which bracket ranges are considered.
4841 pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
4842 &self,
4843 range: Range<T>,
4844 range_filter: Option<&dyn Fn(Range<usize>, Range<usize>) -> bool>,
4845 ) -> Option<(Range<usize>, Range<usize>)> {
4846 let range = range.start.to_offset(self)..range.end.to_offset(self);
4847
4848 // Get the ranges of the innermost pair of brackets.
4849 let mut result: Option<(Range<usize>, Range<usize>)> = None;
4850
4851 for pair in self.enclosing_bracket_ranges(range) {
4852 if let Some(range_filter) = range_filter
4853 && !range_filter(pair.open_range.clone(), pair.close_range.clone())
4854 {
4855 continue;
4856 }
4857
4858 let len = pair.close_range.end - pair.open_range.start;
4859
4860 if let Some((existing_open, existing_close)) = &result {
4861 let existing_len = existing_close.end - existing_open.start;
4862 if len > existing_len {
4863 continue;
4864 }
4865 }
4866
4867 result = Some((pair.open_range, pair.close_range));
4868 }
4869
4870 result
4871 }
4872
4873 /// Returns anchor ranges for any matches of the redaction query.
4874 /// The buffer can be associated with multiple languages, and the redaction query associated with each
4875 /// will be run on the relevant section of the buffer.
4876 pub fn redacted_ranges<T: ToOffset>(
4877 &self,
4878 range: Range<T>,
4879 ) -> impl Iterator<Item = Range<usize>> + '_ {
4880 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4881 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4882 grammar
4883 .redactions_config
4884 .as_ref()
4885 .map(|config| &config.query)
4886 });
4887
4888 let configs = syntax_matches
4889 .grammars()
4890 .iter()
4891 .map(|grammar| grammar.redactions_config.as_ref())
4892 .collect::<Vec<_>>();
4893
4894 iter::from_fn(move || {
4895 let redacted_range = syntax_matches
4896 .peek()
4897 .and_then(|mat| {
4898 configs[mat.grammar_index].and_then(|config| {
4899 mat.captures
4900 .iter()
4901 .find(|capture| capture.index == config.redaction_capture_ix)
4902 })
4903 })
4904 .map(|mat| mat.node.byte_range());
4905 syntax_matches.advance();
4906 redacted_range
4907 })
4908 }
4909
4910 pub fn injections_intersecting_range<T: ToOffset>(
4911 &self,
4912 range: Range<T>,
4913 ) -> impl Iterator<Item = (Range<usize>, &Arc<Language>)> + '_ {
4914 let offset_range = range.start.to_offset(self)..range.end.to_offset(self);
4915
4916 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4917 grammar
4918 .injection_config
4919 .as_ref()
4920 .map(|config| &config.query)
4921 });
4922
4923 let configs = syntax_matches
4924 .grammars()
4925 .iter()
4926 .map(|grammar| grammar.injection_config.as_ref())
4927 .collect::<Vec<_>>();
4928
4929 iter::from_fn(move || {
4930 let ranges = syntax_matches.peek().and_then(|mat| {
4931 let config = &configs[mat.grammar_index]?;
4932 let content_capture_range = mat.captures.iter().find_map(|capture| {
4933 if capture.index == config.content_capture_ix {
4934 Some(capture.node.byte_range())
4935 } else {
4936 None
4937 }
4938 })?;
4939 let language = self.language_at(content_capture_range.start)?;
4940 Some((content_capture_range, language))
4941 });
4942 syntax_matches.advance();
4943 ranges
4944 })
4945 }
4946
4947 pub fn runnable_ranges(
4948 &self,
4949 offset_range: Range<usize>,
4950 ) -> impl Iterator<Item = RunnableRange> + '_ {
4951 let mut syntax_matches = self.syntax.matches(offset_range, self, |grammar| {
4952 grammar.runnable_config.as_ref().map(|config| &config.query)
4953 });
4954
4955 let test_configs = syntax_matches
4956 .grammars()
4957 .iter()
4958 .map(|grammar| grammar.runnable_config.as_ref())
4959 .collect::<Vec<_>>();
4960
4961 iter::from_fn(move || {
4962 loop {
4963 let mat = syntax_matches.peek()?;
4964
4965 let test_range = test_configs[mat.grammar_index].and_then(|test_configs| {
4966 let mut run_range = None;
4967 let full_range = mat.captures.iter().fold(
4968 Range {
4969 start: usize::MAX,
4970 end: 0,
4971 },
4972 |mut acc, next| {
4973 let byte_range = next.node.byte_range();
4974 if acc.start > byte_range.start {
4975 acc.start = byte_range.start;
4976 }
4977 if acc.end < byte_range.end {
4978 acc.end = byte_range.end;
4979 }
4980 acc
4981 },
4982 );
4983 if full_range.start > full_range.end {
4984 // We did not find a full spanning range of this match.
4985 return None;
4986 }
4987 let extra_captures: SmallVec<[_; 1]> =
4988 SmallVec::from_iter(mat.captures.iter().filter_map(|capture| {
4989 test_configs
4990 .extra_captures
4991 .get(capture.index as usize)
4992 .cloned()
4993 .and_then(|tag_name| match tag_name {
4994 RunnableCapture::Named(name) => {
4995 Some((capture.node.byte_range(), name))
4996 }
4997 RunnableCapture::Run => {
4998 let _ = run_range.insert(capture.node.byte_range());
4999 None
5000 }
5001 })
5002 }));
5003 let run_range = run_range?;
5004 let tags = test_configs
5005 .query
5006 .property_settings(mat.pattern_index)
5007 .iter()
5008 .filter_map(|property| {
5009 if *property.key == *"tag" {
5010 property
5011 .value
5012 .as_ref()
5013 .map(|value| RunnableTag(value.to_string().into()))
5014 } else {
5015 None
5016 }
5017 })
5018 .collect();
5019 let extra_captures = extra_captures
5020 .into_iter()
5021 .map(|(range, name)| {
5022 (
5023 name.to_string(),
5024 self.text_for_range(range).collect::<String>(),
5025 )
5026 })
5027 .collect();
5028 // All tags should have the same range.
5029 Some(RunnableRange {
5030 run_range,
5031 full_range,
5032 runnable: Runnable {
5033 tags,
5034 language: mat.language,
5035 buffer: self.remote_id(),
5036 },
5037 extra_captures,
5038 buffer_id: self.remote_id(),
5039 })
5040 });
5041
5042 syntax_matches.advance();
5043 if test_range.is_some() {
                    // It's fine to short-circuit when `.peek()?` returns `None`. However, a match
                    // whose captures lack a run marker should not end this iterator, so in that
                    // case we simply loop around to the next match.
5046 return test_range;
5047 }
5048 }
5049 })
5050 }
5051
    /// Returns the selection sets of peers intersecting the given range. Selections of the
    /// local replica are only included when `include_local` is true.
5053 #[allow(clippy::type_complexity)]
5054 pub fn selections_in_range(
5055 &self,
5056 range: Range<Anchor>,
5057 include_local: bool,
5058 ) -> impl Iterator<
5059 Item = (
5060 ReplicaId,
5061 bool,
5062 CursorShape,
5063 impl Iterator<Item = &Selection<Anchor>> + '_,
5064 ),
5065 > + '_ {
5066 self.remote_selections
5067 .iter()
5068 .filter(move |(replica_id, set)| {
5069 (include_local || **replica_id != self.text.replica_id())
5070 && !set.selections.is_empty()
5071 })
5072 .map(move |(replica_id, set)| {
5073 let start_ix = match set.selections.binary_search_by(|probe| {
5074 probe.end.cmp(&range.start, self).then(Ordering::Greater)
5075 }) {
5076 Ok(ix) | Err(ix) => ix,
5077 };
5078 let end_ix = match set.selections.binary_search_by(|probe| {
5079 probe.start.cmp(&range.end, self).then(Ordering::Less)
5080 }) {
5081 Ok(ix) | Err(ix) => ix,
5082 };
5083
5084 (
5085 *replica_id,
5086 set.line_mode,
5087 set.cursor_shape,
5088 set.selections[start_ix..end_ix].iter(),
5089 )
5090 })
5091 }
5092
    /// Returns whether the buffer contains any diagnostics.
5094 pub fn has_diagnostics(&self) -> bool {
5095 !self.diagnostics.is_empty()
5096 }
5097
5098 /// Returns all the diagnostics intersecting the given range.
5099 pub fn diagnostics_in_range<'a, T, O>(
5100 &'a self,
5101 search_range: Range<T>,
5102 reversed: bool,
5103 ) -> impl 'a + Iterator<Item = DiagnosticEntryRef<'a, O>>
5104 where
5105 T: 'a + Clone + ToOffset,
5106 O: 'a + FromAnchor,
5107 {
5108 let mut iterators: Vec<_> = self
5109 .diagnostics
5110 .iter()
5111 .map(|(_, collection)| {
5112 collection
5113 .range::<T, text::Anchor>(search_range.clone(), self, true, reversed)
5114 .peekable()
5115 })
5116 .collect();
5117
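        // Perform a k-way merge over the per-server iterators: repeatedly yield the
        // entry whose start (then severity, then group id) sorts first, reversing
        // the comparison when iterating in reverse.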
5118 std::iter::from_fn(move || {
5119 let (next_ix, _) = iterators
5120 .iter_mut()
5121 .enumerate()
5122 .flat_map(|(ix, iter)| Some((ix, iter.peek()?)))
5123 .min_by(|(_, a), (_, b)| {
5124 let cmp = a
5125 .range
5126 .start
5127 .cmp(&b.range.start, self)
5128 // when range is equal, sort by diagnostic severity
5129 .then(a.diagnostic.severity.cmp(&b.diagnostic.severity))
5130 // and stabilize order with group_id
5131 .then(a.diagnostic.group_id.cmp(&b.diagnostic.group_id));
5132 if reversed { cmp.reverse() } else { cmp }
5133 })?;
5134 iterators[next_ix]
5135 .next()
5136 .map(
5137 |DiagnosticEntryRef { range, diagnostic }| DiagnosticEntryRef {
5138 diagnostic,
5139 range: FromAnchor::from_anchor(&range.start, self)
5140 ..FromAnchor::from_anchor(&range.end, self),
5141 },
5142 )
5143 })
5144 }
5145
5146 /// Raw access to the diagnostic sets. Typically `diagnostic_groups` or `diagnostic_group`
5147 /// should be used instead.
5148 pub fn diagnostic_sets(&self) -> &SmallVec<[(LanguageServerId, DiagnosticSet); 2]> {
5149 &self.diagnostics
5150 }
5151
    /// Returns all the diagnostic groups associated with the given
    /// language server ID. If no language server ID is provided,
    /// all diagnostic groups are returned.
5155 pub fn diagnostic_groups(
5156 &self,
5157 language_server_id: Option<LanguageServerId>,
5158 ) -> Vec<(LanguageServerId, DiagnosticGroup<'_, Anchor>)> {
5159 let mut groups = Vec::new();
5160
5161 if let Some(language_server_id) = language_server_id {
5162 if let Ok(ix) = self
5163 .diagnostics
5164 .binary_search_by_key(&language_server_id, |e| e.0)
5165 {
5166 self.diagnostics[ix]
5167 .1
5168 .groups(language_server_id, &mut groups, self);
5169 }
5170 } else {
5171 for (language_server_id, diagnostics) in self.diagnostics.iter() {
5172 diagnostics.groups(*language_server_id, &mut groups, self);
5173 }
5174 }
5175
5176 groups.sort_by(|(id_a, group_a), (id_b, group_b)| {
5177 let a_start = &group_a.entries[group_a.primary_ix].range.start;
5178 let b_start = &group_b.entries[group_b.primary_ix].range.start;
5179 a_start.cmp(b_start, self).then_with(|| id_a.cmp(id_b))
5180 });
5181
5182 groups
5183 }
5184
5185 /// Returns an iterator over the diagnostics for the given group.
5186 pub fn diagnostic_group<O>(
5187 &self,
5188 group_id: usize,
5189 ) -> impl Iterator<Item = DiagnosticEntryRef<'_, O>> + use<'_, O>
5190 where
5191 O: FromAnchor + 'static,
5192 {
5193 self.diagnostics
5194 .iter()
5195 .flat_map(move |(_, set)| set.group(group_id, self))
5196 }
5197
5198 /// An integer version number that accounts for all updates besides
5199 /// the buffer's text itself (which is versioned via a version vector).
5200 pub fn non_text_state_update_count(&self) -> usize {
5201 self.non_text_state_update_count
5202 }
5203
5204 /// An integer version that changes when the buffer's syntax changes.
5205 pub fn syntax_update_count(&self) -> usize {
5206 self.syntax.update_count()
5207 }
5208
    /// Returns a snapshot of the underlying file.
5210 pub fn file(&self) -> Option<&Arc<dyn File>> {
5211 self.file.as_ref()
5212 }
5213
5214 pub fn resolve_file_path(&self, include_root: bool, cx: &App) -> Option<String> {
5215 if let Some(file) = self.file() {
5216 if file.path().file_name().is_none() || include_root {
5217 Some(file.full_path(cx).to_string_lossy().into_owned())
5218 } else {
5219 Some(file.path().display(file.path_style(cx)).to_string())
5220 }
5221 } else {
5222 None
5223 }
5224 }
5225
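    /// Collects the distinct words in the given buffer offset range, keyed by their text
    /// and mapped to their anchor ranges. See [`WordsQuery`] for the available filters.
    ///
    /// Illustrative sketch (assumes `snapshot` is a `BufferSnapshot` in scope):
    ///
    /// ```ignore
    /// let words = snapshot.words_in_range(WordsQuery {
    ///     fuzzy_contents: None,
    ///     skip_digits: true,
    ///     range: 0..snapshot.len(),
    /// });
    /// ```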
5226 pub fn words_in_range(&self, query: WordsQuery) -> BTreeMap<String, Range<Anchor>> {
5227 let query_str = query.fuzzy_contents;
5228 if query_str.is_some_and(|query| query.is_empty()) {
5229 return BTreeMap::default();
5230 }
5231
5232 let classifier = CharClassifier::new(self.language.clone().map(|language| LanguageScope {
5233 language,
5234 override_id: None,
5235 }));
5236
5237 let mut query_ix = 0;
5238 let query_chars = query_str.map(|query| query.chars().collect::<Vec<_>>());
5239 let query_len = query_chars.as_ref().map_or(0, |query| query.len());
5240
5241 let mut words = BTreeMap::default();
5242 let mut current_word_start_ix = None;
5243 let mut chunk_ix = query.range.start;
5244 for chunk in self.chunks(query.range, false) {
5245 for (i, c) in chunk.text.char_indices() {
5246 let ix = chunk_ix + i;
5247 if classifier.is_word(c) {
5248 if current_word_start_ix.is_none() {
5249 current_word_start_ix = Some(ix);
5250 }
5251
5252 if let Some(query_chars) = &query_chars
5253 && query_ix < query_len
5254 && c.to_lowercase().eq(query_chars[query_ix].to_lowercase())
5255 {
5256 query_ix += 1;
5257 }
5258 continue;
5259 } else if let Some(word_start) = current_word_start_ix.take()
5260 && query_ix == query_len
5261 {
5262 let word_range = self.anchor_before(word_start)..self.anchor_after(ix);
5263 let mut word_text = self.text_for_range(word_start..ix).peekable();
5264 let first_char = word_text
5265 .peek()
5266 .and_then(|first_chunk| first_chunk.chars().next());
                    // As a heuristic to reduce useless completions, optionally skip
                    // "words" that start with a digit.
5268 if !query.skip_digits
5269 || first_char.is_none_or(|first_char| !first_char.is_digit(10))
5270 {
5271 words.insert(word_text.collect(), word_range);
5272 }
5273 }
5274 query_ix = 0;
5275 }
5276 chunk_ix += chunk.text.len();
5277 }
5278
5279 words
5280 }
5281}
5282
5283pub struct WordsQuery<'a> {
    /// Only return words that contain every character of this fuzzy string, in order
    /// (matched case-insensitively).
5285 pub fuzzy_contents: Option<&'a str>,
5286 /// Skips words that start with a digit.
5287 pub skip_digits: bool,
    /// Buffer offset range in which to look for words.
5289 pub range: Range<usize>,
5290}
5291
5292fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
5293 indent_size_for_text(text.chars_at(Point::new(row, 0)))
5294}
5295
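/// Computes the indentation of a line from its leading whitespace: the length counts
/// every leading space or tab, while the kind is taken from the first whitespace
/// character. For example, `indent_size_for_text("  \tfoo".chars())` yields a length
/// of 3 with kind `Space`.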
5296fn indent_size_for_text(text: impl Iterator<Item = char>) -> IndentSize {
5297 let mut result = IndentSize::spaces(0);
5298 for c in text {
5299 let kind = match c {
5300 ' ' => IndentKind::Space,
5301 '\t' => IndentKind::Tab,
5302 _ => break,
5303 };
5304 if result.len == 0 {
5305 result.kind = kind;
5306 }
5307 result.len += 1;
5308 }
5309 result
5310}
5311
impl Clone for BufferSnapshot {
    fn clone(&self) -> Self {
        Self {
            text: self.text.clone(),
            syntax: self.syntax.clone(),
            file: self.file.clone(),
            remote_selections: self.remote_selections.clone(),
            diagnostics: self.diagnostics.clone(),
            language: self.language.clone(),
            tree_sitter_data: self.tree_sitter_data.clone(),
            non_text_state_update_count: self.non_text_state_update_count,
            capability: self.capability,
        }
    }
}

impl Deref for BufferSnapshot {
    type Target = text::BufferSnapshot;

    fn deref(&self) -> &Self::Target {
        &self.text
    }
}

unsafe impl Send for BufferChunks<'_> {}

impl<'a> BufferChunks<'a> {
    pub(crate) fn new(
        text: &'a Rope,
        range: Range<usize>,
        syntax: Option<(SyntaxMapCaptures<'a>, Vec<HighlightMap>)>,
        diagnostics: bool,
        buffer_snapshot: Option<&'a BufferSnapshot>,
    ) -> Self {
        let mut highlights = None;
        if let Some((captures, highlight_maps)) = syntax {
            highlights = Some(BufferChunkHighlights {
                captures,
                next_capture: None,
                stack: Default::default(),
                highlight_maps,
            })
        }

        let diagnostic_endpoints = diagnostics.then(|| Vec::new().into_iter().peekable());
        let chunks = text.chunks_in_range(range.clone());

        let mut this = BufferChunks {
            range,
            buffer_snapshot,
            chunks,
            diagnostic_endpoints,
            error_depth: 0,
            warning_depth: 0,
            information_depth: 0,
            hint_depth: 0,
            unnecessary_depth: 0,
            underline: true,
            highlights,
        };
        this.initialize_diagnostic_endpoints();
        this
    }

    /// Seeks to the given byte range in the buffer.
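    ///
    /// A minimal usage sketch, not a doctest; it assumes `snapshot` is a
    /// [`BufferSnapshot`] and that the arbitrary offsets used here are valid for it:
    ///
    /// ```ignore
    /// let mut chunks = snapshot.chunks(0..snapshot.len(), true);
    /// let head: String = chunks.by_ref().take(2).map(|chunk| chunk.text).collect();
    /// // Reuse the same iterator for a later sub-range instead of rebuilding it.
    /// chunks.seek(128..256);
    /// let tail: String = chunks.map(|chunk| chunk.text).collect();
    /// ```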
    pub fn seek(&mut self, range: Range<usize>) {
        let old_range = std::mem::replace(&mut self.range, range.clone());
        self.chunks.set_range(self.range.clone());
        if let Some(highlights) = self.highlights.as_mut() {
            if old_range.start <= self.range.start && old_range.end >= self.range.end {
                // Reuse existing highlights stack, as the new range is a subrange of the old one.
                highlights
                    .stack
                    .retain(|(end_offset, _)| *end_offset > range.start);
                if let Some(capture) = &highlights.next_capture
                    && range.start >= capture.node.start_byte()
                {
                    let next_capture_end = capture.node.end_byte();
                    if range.start < next_capture_end {
                        highlights.stack.push((
                            next_capture_end,
                            highlights.highlight_maps[capture.grammar_index].get(capture.index),
                        ));
                    }
                    highlights.next_capture.take();
                }
            } else if let Some(snapshot) = self.buffer_snapshot {
                let (captures, highlight_maps) = snapshot.get_highlights(self.range.clone());
                *highlights = BufferChunkHighlights {
                    captures,
                    next_capture: None,
                    stack: Default::default(),
                    highlight_maps,
                };
            } else {
                // We cannot obtain new highlights for a language-aware buffer iterator, as we don't have a buffer snapshot.
                // Seeking such BufferChunks is not supported.
                debug_assert!(
                    false,
                    "Attempted to seek on a language-aware buffer iterator without associated buffer snapshot"
                );
            }

            highlights.captures.set_byte_range(self.range.clone());
            self.initialize_diagnostic_endpoints();
        }
    }

    fn initialize_diagnostic_endpoints(&mut self) {
        if let Some(diagnostics) = self.diagnostic_endpoints.as_mut()
            && let Some(buffer) = self.buffer_snapshot
        {
            let mut diagnostic_endpoints = Vec::new();
            for entry in buffer.diagnostics_in_range::<_, usize>(self.range.clone(), false) {
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.start,
                    is_start: true,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
                diagnostic_endpoints.push(DiagnosticEndpoint {
                    offset: entry.range.end,
                    is_start: false,
                    severity: entry.diagnostic.severity,
                    is_unnecessary: entry.diagnostic.is_unnecessary,
                    underline: entry.diagnostic.underline,
                });
            }
            diagnostic_endpoints
                .sort_unstable_by_key(|endpoint| (endpoint.offset, !endpoint.is_start));
            *diagnostics = diagnostic_endpoints.into_iter().peekable();
            self.hint_depth = 0;
            self.error_depth = 0;
            self.warning_depth = 0;
            self.information_depth = 0;
        }
    }

    /// The current byte offset in the buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    pub fn range(&self) -> Range<usize> {
        self.range.clone()
    }

    fn update_diagnostic_depths(&mut self, endpoint: DiagnosticEndpoint) {
        let depth = match endpoint.severity {
            DiagnosticSeverity::ERROR => &mut self.error_depth,
            DiagnosticSeverity::WARNING => &mut self.warning_depth,
            DiagnosticSeverity::INFORMATION => &mut self.information_depth,
            DiagnosticSeverity::HINT => &mut self.hint_depth,
            _ => return,
        };
        if endpoint.is_start {
            *depth += 1;
        } else {
            *depth -= 1;
        }

        if endpoint.is_unnecessary {
            if endpoint.is_start {
                self.unnecessary_depth += 1;
            } else {
                self.unnecessary_depth -= 1;
            }
        }
    }

    fn current_diagnostic_severity(&self) -> Option<DiagnosticSeverity> {
        if self.error_depth > 0 {
            Some(DiagnosticSeverity::ERROR)
        } else if self.warning_depth > 0 {
            Some(DiagnosticSeverity::WARNING)
        } else if self.information_depth > 0 {
            Some(DiagnosticSeverity::INFORMATION)
        } else if self.hint_depth > 0 {
            Some(DiagnosticSeverity::HINT)
        } else {
            None
        }
    }

    fn current_code_is_unnecessary(&self) -> bool {
        self.unnecessary_depth > 0
    }
}

impl<'a> Iterator for BufferChunks<'a> {
    type Item = Chunk<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        let mut next_capture_start = usize::MAX;
        let mut next_diagnostic_endpoint = usize::MAX;

        if let Some(highlights) = self.highlights.as_mut() {
            while let Some((parent_capture_end, _)) = highlights.stack.last() {
                if *parent_capture_end <= self.range.start {
                    highlights.stack.pop();
                } else {
                    break;
                }
            }

            if highlights.next_capture.is_none() {
                highlights.next_capture = highlights.captures.next();
            }

            while let Some(capture) = highlights.next_capture.as_ref() {
                if self.range.start < capture.node.start_byte() {
                    next_capture_start = capture.node.start_byte();
                    break;
                } else {
                    let highlight_id =
                        highlights.highlight_maps[capture.grammar_index].get(capture.index);
                    highlights
                        .stack
                        .push((capture.node.end_byte(), highlight_id));
                    highlights.next_capture = highlights.captures.next();
                }
            }
        }

        let mut diagnostic_endpoints = std::mem::take(&mut self.diagnostic_endpoints);
        if let Some(diagnostic_endpoints) = diagnostic_endpoints.as_mut() {
            while let Some(endpoint) = diagnostic_endpoints.peek().copied() {
                if endpoint.offset <= self.range.start {
                    self.update_diagnostic_depths(endpoint);
                    diagnostic_endpoints.next();
                    self.underline = endpoint.underline;
                } else {
                    next_diagnostic_endpoint = endpoint.offset;
                    break;
                }
            }
        }
        self.diagnostic_endpoints = diagnostic_endpoints;

        if let Some(ChunkBitmaps {
            text: chunk,
            chars: chars_map,
            tabs,
        }) = self.chunks.peek_with_bitmaps()
        {
            let chunk_start = self.range.start;
            let mut chunk_end = (self.chunks.offset() + chunk.len())
                .min(next_capture_start)
                .min(next_diagnostic_endpoint);
            let mut highlight_id = None;
            if let Some(highlights) = self.highlights.as_ref()
                && let Some((parent_capture_end, parent_highlight_id)) = highlights.stack.last()
            {
                chunk_end = chunk_end.min(*parent_capture_end);
                highlight_id = Some(*parent_highlight_id);
            }
            let bit_start = chunk_start - self.chunks.offset();
            let bit_end = chunk_end - self.chunks.offset();

            let slice = &chunk[bit_start..bit_end];

            let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
            let tabs = (tabs >> bit_start) & mask;
            let chars = (chars_map >> bit_start) & mask;

            self.range.start = chunk_end;
            if self.range.start == self.chunks.offset() + chunk.len() {
                self.chunks.next().unwrap();
            }

            Some(Chunk {
                text: slice,
                syntax_highlight_id: highlight_id,
                underline: self.underline,
                diagnostic_severity: self.current_diagnostic_severity(),
                is_unnecessary: self.current_code_is_unnecessary(),
                tabs,
                chars,
                ..Chunk::default()
            })
        } else {
            None
        }
    }
}

impl operation_queue::Operation for Operation {
    fn lamport_timestamp(&self) -> clock::Lamport {
        match self {
            Operation::Buffer(_) => {
                unreachable!("buffer operations should never be deferred at this layer")
            }
            Operation::UpdateDiagnostics {
                lamport_timestamp, ..
            }
            | Operation::UpdateSelections {
                lamport_timestamp, ..
            }
            | Operation::UpdateCompletionTriggers {
                lamport_timestamp, ..
            }
            | Operation::UpdateLineEnding {
                lamport_timestamp, ..
            } => *lamport_timestamp,
        }
    }
}

impl Default for Diagnostic {
    fn default() -> Self {
        Self {
            source: Default::default(),
            source_kind: DiagnosticSourceKind::Other,
            code: None,
            code_description: None,
            severity: DiagnosticSeverity::ERROR,
            message: Default::default(),
            markdown: None,
            group_id: 0,
            is_primary: false,
            is_disk_based: false,
            is_unnecessary: false,
            underline: true,
            data: None,
            registration_id: None,
        }
    }
}

impl IndentSize {
    /// Returns an [`IndentSize`] representing the given number of spaces.
    pub fn spaces(len: u32) -> Self {
        Self {
            len,
            kind: IndentKind::Space,
        }
    }

    /// Returns an [`IndentSize`] representing a tab.
    pub fn tab() -> Self {
        Self {
            len: 1,
            kind: IndentKind::Tab,
        }
    }

    /// An iterator over the characters represented by this [`IndentSize`].
    pub fn chars(&self) -> impl Iterator<Item = char> {
        iter::repeat(self.char()).take(self.len as usize)
    }

    /// The character representation of this [`IndentSize`].
    pub fn char(&self) -> char {
        match self.kind {
            IndentKind::Space => ' ',
            IndentKind::Tab => '\t',
        }
    }

    /// Consumes this [`IndentSize`] and returns a new one that has been shrunk
    /// (`Ordering::Less`) or enlarged (`Ordering::Greater`) by the given size.
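    ///
    /// Enlarging an empty indent adopts the given size; otherwise the adjustment
    /// only applies when the indent kinds match. A rough sketch of the intended
    /// behavior (not a doctest):
    ///
    /// ```ignore
    /// // Growing a four-space indent by four more spaces yields eight spaces.
    /// assert_eq!(
    ///     IndentSize::spaces(4)
    ///         .with_delta(Ordering::Greater, IndentSize::spaces(4))
    ///         .len,
    ///     8
    /// );
    /// // Shrinking by a tab leaves the indent unchanged, because the kinds differ.
    /// assert_eq!(
    ///     IndentSize::spaces(4).with_delta(Ordering::Less, IndentSize::tab()).len,
    ///     4
    /// );
    /// ```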
    pub fn with_delta(mut self, direction: Ordering, size: IndentSize) -> Self {
        match direction {
            Ordering::Less => {
                if self.kind == size.kind && self.len >= size.len {
                    self.len -= size.len;
                }
            }
            Ordering::Equal => {}
            Ordering::Greater => {
                if self.len == 0 {
                    self = size;
                } else if self.kind == size.kind {
                    self.len += size.len;
                }
            }
        }
        self
    }

    /// The indent's width in columns, with tabs expanded to the given tab size.
    pub fn len_with_expanded_tabs(&self, tab_size: NonZeroU32) -> usize {
        match self.kind {
            IndentKind::Space => self.len as usize,
            IndentKind::Tab => self.len as usize * tab_size.get() as usize,
        }
    }
}

#[cfg(any(test, feature = "test-support"))]
pub struct TestFile {
    pub path: Arc<RelPath>,
    pub root_name: String,
    pub local_root: Option<PathBuf>,
}

#[cfg(any(test, feature = "test-support"))]
impl File for TestFile {
    fn path(&self) -> &Arc<RelPath> {
        &self.path
    }

    fn full_path(&self, _: &gpui::App) -> PathBuf {
        PathBuf::from(self.root_name.clone()).join(self.path.as_std_path())
    }

    fn as_local(&self) -> Option<&dyn LocalFile> {
        if self.local_root.is_some() {
            Some(self)
        } else {
            None
        }
    }

    fn disk_state(&self) -> DiskState {
        unimplemented!()
    }

    fn file_name<'a>(&'a self, _: &'a gpui::App) -> &'a str {
        self.path().file_name().unwrap_or(self.root_name.as_ref())
    }

    fn worktree_id(&self, _: &App) -> WorktreeId {
        WorktreeId::from_usize(0)
    }

    fn to_proto(&self, _: &App) -> rpc::proto::File {
        unimplemented!()
    }

    fn is_private(&self) -> bool {
        false
    }

    fn path_style(&self, _cx: &App) -> PathStyle {
        PathStyle::local()
    }
}

#[cfg(any(test, feature = "test-support"))]
impl LocalFile for TestFile {
    fn abs_path(&self, _cx: &App) -> PathBuf {
        PathBuf::from(self.local_root.as_ref().unwrap())
            .join(&self.root_name)
            .join(self.path.as_std_path())
    }

    fn load(&self, _cx: &App) -> Task<Result<String>> {
        unimplemented!()
    }

    fn load_bytes(&self, _cx: &App) -> Task<Result<Vec<u8>>> {
        unimplemented!()
    }
}

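/// Coalesces an ascending sequence of values into contiguous ranges, starting
/// a new range whenever a gap appears or a range reaches `max_len`.
///
/// A small illustration of the expected grouping (not a doctest):
///
/// ```ignore
/// let ranges: Vec<_> = contiguous_ranges([1, 2, 3, 5, 6, 10].into_iter(), 2).collect();
/// assert_eq!(ranges, vec![1..3, 3..4, 5..7, 10..11]);
/// ```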
pub(crate) fn contiguous_ranges(
    values: impl Iterator<Item = u32>,
    max_len: usize,
) -> impl Iterator<Item = Range<u32>> {
    let mut values = values;
    let mut current_range: Option<Range<u32>> = None;
    std::iter::from_fn(move || {
        loop {
            if let Some(value) = values.next() {
                if let Some(range) = &mut current_range
                    && value == range.end
                    && range.len() < max_len
                {
                    range.end += 1;
                    continue;
                }

                let prev_range = current_range.clone();
                current_range = Some(value..(value + 1));
                if prev_range.is_some() {
                    return prev_range;
                }
            } else {
                return current_range.take();
            }
        }
    })
}

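/// Classifies characters as word, whitespace, or punctuation characters,
/// optionally taking language-specific word characters into account.
///
/// A minimal sketch of the builder-style configuration (not a doctest; the
/// scope would normally come from a language rather than `None`):
///
/// ```ignore
/// let classifier = CharClassifier::new(None).ignore_punctuation(true);
/// assert!(classifier.kind('a') == CharKind::Word);
/// assert!(classifier.kind(' ') == CharKind::Whitespace);
/// // With `ignore_punctuation`, punctuation is treated as part of a word.
/// assert!(classifier.kind('-') == CharKind::Word);
/// ```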
#[derive(Default, Debug)]
pub struct CharClassifier {
    scope: Option<LanguageScope>,
    scope_context: Option<CharScopeContext>,
    ignore_punctuation: bool,
}

impl CharClassifier {
    pub fn new(scope: Option<LanguageScope>) -> Self {
        Self {
            scope,
            scope_context: None,
            ignore_punctuation: false,
        }
    }

    pub fn scope_context(self, scope_context: Option<CharScopeContext>) -> Self {
        Self {
            scope_context,
            ..self
        }
    }

    pub fn ignore_punctuation(self, ignore_punctuation: bool) -> Self {
        Self {
            ignore_punctuation,
            ..self
        }
    }

    pub fn is_whitespace(&self, c: char) -> bool {
        self.kind(c) == CharKind::Whitespace
    }

    pub fn is_word(&self, c: char) -> bool {
        self.kind(c) == CharKind::Word
    }

    pub fn is_punctuation(&self, c: char) -> bool {
        self.kind(c) == CharKind::Punctuation
    }

    pub fn kind_with(&self, c: char, ignore_punctuation: bool) -> CharKind {
        if c.is_alphanumeric() || c == '_' {
            return CharKind::Word;
        }

        if let Some(scope) = &self.scope {
            let characters = match self.scope_context {
                Some(CharScopeContext::Completion) => scope.completion_query_characters(),
                Some(CharScopeContext::LinkedEdit) => scope.linked_edit_characters(),
                None => scope.word_characters(),
            };
            if let Some(characters) = characters
                && characters.contains(&c)
            {
                return CharKind::Word;
            }
        }

        if c.is_whitespace() {
            return CharKind::Whitespace;
        }

        if ignore_punctuation {
            CharKind::Word
        } else {
            CharKind::Punctuation
        }
    }

    pub fn kind(&self, c: char) -> CharKind {
        self.kind_with(c, self.ignore_punctuation)
    }
}

/// Find all of the ranges of whitespace that occur at the ends of lines
/// in the given rope.
///
/// This could also be done with a regex search, but this implementation
/// avoids copying text.
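///
/// A small illustration of the expected output (not a doctest):
///
/// ```ignore
/// let rope = Rope::from("foo  \nbar\t\nbaz");
/// // The two spaces after "foo" and the tab after "bar" are reported.
/// assert_eq!(trailing_whitespace_ranges(&rope), vec![3..5, 9..10]);
/// ```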
pub fn trailing_whitespace_ranges(rope: &Rope) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();

    let mut offset = 0;
    let mut prev_chunk_trailing_whitespace_range = 0..0;
    for chunk in rope.chunks() {
        let mut prev_line_trailing_whitespace_range = 0..0;
        for (i, line) in chunk.split('\n').enumerate() {
            let line_end_offset = offset + line.len();
            let trimmed_line_len = line.trim_end_matches([' ', '\t']).len();
            let mut trailing_whitespace_range = (offset + trimmed_line_len)..line_end_offset;

            if i == 0 && trimmed_line_len == 0 {
                trailing_whitespace_range.start = prev_chunk_trailing_whitespace_range.start;
            }
            if !prev_line_trailing_whitespace_range.is_empty() {
                ranges.push(prev_line_trailing_whitespace_range);
            }

            offset = line_end_offset + 1;
            prev_line_trailing_whitespace_range = trailing_whitespace_range;
        }

        offset -= 1;
        prev_chunk_trailing_whitespace_range = prev_line_trailing_whitespace_range;
    }

    if !prev_chunk_trailing_whitespace_range.is_empty() {
        ranges.push(prev_chunk_trailing_whitespace_range);
    }

    ranges
}